Improve reading QR and PDF417 codes with AVFoundation in Swift

I'm trying to read PDF417 and QR codes. My code works, but it fails when there is little light or the image is blurred, so I'm looking for ways to make the reading more reliable.
I tried changing the camera settings: I increased the AVCaptureSession resolution preset and the AVCaptureDevice videoZoomFactor, but apparently I'm not doing it correctly.
I hope you can help me.
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    let session: AVCaptureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer!
    var highlightView: UIView = UIView()

    override func viewDidLoad() {
        super.viewDidLoad()

        self.highlightView.autoresizingMask = UIViewAutoresizing.FlexibleTopMargin |
            UIViewAutoresizing.FlexibleBottomMargin |
            UIViewAutoresizing.FlexibleLeftMargin |
            UIViewAutoresizing.FlexibleRightMargin

        self.highlightView.layer.borderColor = UIColor.greenColor().CGColor
        self.highlightView.layer.borderWidth = 3
        self.view.addSubview(self.highlightView)

        let device = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)

        var error: NSError? = nil
        let input: AVCaptureDeviceInput? = AVCaptureDeviceInput.deviceInputWithDevice(device, error: &error) as? AVCaptureDeviceInput

        if input != nil {
            session.addInput(input)
        } else {
            println(error)
        }

        let output = AVCaptureMetadataOutput()
        output.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
        session.addOutput(output)
        output.metadataObjectTypes = output.availableMetadataObjectTypes

        previewLayer = AVCaptureVideoPreviewLayer.layerWithSession(session) as! AVCaptureVideoPreviewLayer
        previewLayer.frame = self.view.bounds
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        self.view.layer.addSublayer(previewLayer)

        session.startRunning()
    }

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [AnyObject]!, fromConnection connection: AVCaptureConnection!) {
        var highlightViewRect = CGRectZero
        var barCodeObject: AVMetadataMachineReadableCodeObject!
        var detectionString: String!
        let barCodeTypes = [
            AVMetadataObjectTypePDF417Code,
            AVMetadataObjectTypeQRCode
        ]

        for metadata in metadataObjects {
            for barcodeType in barCodeTypes {
                if metadata.type == barcodeType {
                    barCodeObject = self.previewLayer.transformedMetadataObjectForMetadataObject(metadata as! AVMetadataMachineReadableCodeObject) as! AVMetadataMachineReadableCodeObject
                    highlightViewRect = barCodeObject.bounds
                    detectionString = (metadata as! AVMetadataMachineReadableCodeObject).stringValue
                    self.session.stopRunning()
                    self.alert(detectionString)
                    break
                }
            }
        }
        println(detectionString)
        self.highlightView.frame = highlightViewRect
        self.view.bringSubviewToFront(self.highlightView)
    }

    func alert(Code: String) {
        let actionSheet: UIAlertController = UIAlertController(title: "Barcode", message: "\(Code)", preferredStyle: UIAlertControllerStyle.Alert)
        let firstAlertAction: UIAlertAction = UIAlertAction(title: "OK", style: UIAlertActionStyle.Default, handler: { (alertAction: UIAlertAction!) in
            self.session.startRunning()
        })
        actionSheet.addAction(firstAlertAction)
        self.presentViewController(actionSheet, animated: true, completion: nil)
    }
}

I struggled with inconsistent reading of QR codes; what worked for me was to get the QR code as flat and proportional as possible (if reading from another iPhone/iPad screen). If that doesn't help you, this tutorial helped me out: http://www.appcoda.com/qr-code-reader-swift/
Cheers
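Beyond framing the code well, you can often improve low-light and blurry reads by configuring the capture device itself. Below is a minimal sketch in current Swift syntax (the question's code predates Swift 3, so adapt the names accordingly); the preset, torch level, and zoom factor are assumptions to tune, not recommended values:

import AVFoundation

// Best-effort tuning for barcode scanning in poor light.
// Assumes `session` is already configured and `device` is its video device.
func tuneForScanning(session: AVCaptureSession, device: AVCaptureDevice) {
    // A higher-resolution preset gives the detector more pixels per module.
    if session.canSetSessionPreset(.hd1920x1080) {
        session.sessionPreset = .hd1920x1080
    }
    do {
        try device.lockForConfiguration()
        // Keep refocusing as the user moves the phone over the code.
        if device.isFocusModeSupported(.continuousAutoFocus) {
            device.focusMode = .continuousAutoFocus
        }
        // Barcodes are usually held close to the lens.
        if device.isAutoFocusRangeRestrictionSupported {
            device.autoFocusRangeRestriction = .near
        }
        // Light the scene in the dark (0.5 is an arbitrary starting level).
        if device.hasTorch, device.isTorchAvailable {
            try device.setTorchModeOn(level: 0.5)
        }
        // A modest zoom enlarges small PDF417 modules; 1.5x is a guess.
        device.videoZoomFactor = min(1.5, device.activeFormat.videoMaxZoomFactor)
        device.unlockForConfiguration()
    } catch {
        print("Could not configure device: \(error)")
    }
}

It can also help to restrict AVCaptureMetadataOutput's rectOfInterest to the region under your highlight view (converted with the preview layer's metadataOutputRectConverted(fromLayerRect:)), so the detector searches a smaller area.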

Related

How to show a scanned QR code user's profile in my app in Swift

This is the first time I am working with QR codes. In our app, I need to show the scanned user's profile when I scan that user's QR code. I have written the code to scan the QR code; if I scan a valid QR code, how do I then show that person's profile in my app?
Code for the QR code scanner:
import UIKit
import AVFoundation

class QRCodeViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    var video = AVCaptureVideoPreviewLayer()
    let session = AVCaptureSession()
    @IBOutlet weak var qrCode: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.video) else { return }
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice)
            session.addInput(input)
        } catch {
            print("error")
        }
        let output = AVCaptureMetadataOutput()
        session.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        output.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
        video = AVCaptureVideoPreviewLayer(session: session)
        video.frame = view.layer.bounds
        view.layer.addSublayer(video)
        self.view.bringSubviewToFront(qrCode)
        session.startRunning()
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        if metadataObjects != nil && metadataObjects.count != 0 {
            if let object = metadataObjects[0] as? AVMetadataMachineReadableCodeObject {
                if object.type == AVMetadataObject.ObjectType.qr {
                    let alert = UIAlertController(title: "QR Code", message: object.stringValue, preferredStyle: .alert)
                    alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
                    alert.addAction(UIAlertAction(title: "Confirm", style: .default, handler: { (nil) in
                        // self.scannedCode = object.stringValue!
                        self.session.stopRunning()
                    }))
                    present(alert, animated: true, completion: nil)
                }
            }
        }
    }
}
Using the above code I can scan a user's QR code correctly; how do I then show that person's profile on my screen? Please suggest a solution.
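No answer is recorded here, but a common approach is to treat the scanned string as a user identifier, fetch that user, and push a profile screen once the scan is confirmed. A minimal sketch, where ProfileViewController and fetchUserProfile(id:completion:) are hypothetical stand-ins for your own screen and backend call:

// Call from the "Confirm" action, after self.session.stopRunning():
//   self.showProfile(for: object.stringValue ?? "")
func showProfile(for scannedValue: String) {
    // `fetchUserProfile` and `ProfileViewController` are hypothetical;
    // replace them with your own networking call and storyboard screen.
    fetchUserProfile(id: scannedValue) { profile in
        DispatchQueue.main.async {
            guard let profileVC = self.storyboard?.instantiateViewController(withIdentifier: "ProfileViewController") as? ProfileViewController else { return }
            profileVC.profile = profile
            self.navigationController?.pushViewController(profileVC, animated: true)
        }
    }
}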

Several errors in my Swift code for a camera app

I am trying to create a camera app in Xcode 10.1 using Swift for a school project. I have been working on this for a while and still have several errors.
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    let captureSession = AVCaptureSession()
    var previewLayer: CALayer!
    var captureDevice: AVCaptureDevice?
    var takePhoto = false

    override func viewDidLoad() {
        super.viewDidLoad()
        prepareCamera()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        prepareCamera()
    }

    func prepareCamera() {
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
        if let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.back) {
            //if availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back).devices {
            //let captureDevice = availableDevices
            beginSession()
        }
    }

    func beginSession() {
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            captureSession.addInput(captureDeviceInput)
        } catch {
            print(error.localizedDescription)
        }

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            self.previewLayer = previewLayer
            self.view.layer.addSublayer(self.previewLayer)
            self.previewLayer.frame = self.view.layer.frame
            captureSession.startRunning()

            let dataOutput = AVCaptureVideoDataOutput()
            dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)] as [String: Any]
            dataOutput.alwaysDiscardsLateVideoFrames = true
            if captureSession.canAddOutput(dataOutput) {
                captureSession.addOutput(dataOutput)
            }
            captureSession.commitConfiguration()

            let queue = DispatchQueue(label: "com.sophiaradis.captureQueue")
            dataOutput.setSampleBufferDelegate(self, queue: queue)
        }
    }

    @IBAction func takePhoto(_ sender: Any) {
        takePhoto = true
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if takePhoto {
            takePhoto = false
            if let image = self.getImageFromSamplyBuffer(buffer: sampleBuffer) {
                let photoVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "PhotoVC") as! PhotoViewController
                photoVC.takenPhoto = image
                DispatchQueue.main.async {
                    self.present(photoVC, animated: true, completion: {
                        self.stopCaptureSession()
                    })
                }
            }
        }
    }

    func getImageFromSamplyBuffer(buffer: CMSampleBuffer) -> UIImage? {
        if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            let context = CIContext()
            let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))
            if let image = context.createCGImage(ciImage, from: imageRect) {
                return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
            }
        }
        return nil
    }

    func stopCaptureSession() {
        self.captureSession.stopRunning()
        if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in inputs {
                self.captureSession.removeInput(input)
            }
        }
    }
}
There is an error in these lines of code:
if let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes:[AVCaptureDevice.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.back)
This error says that type 'AVCaptureDevice' has no member 'DiscoverySession'. But when I looked online, it did.
The lines that follow produce a second error: "Cannot convert value of type 'AVCaptureDevice?' to expected argument type 'AVCaptureDevice'."
func beginSession() {
    do {
        let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
I have no idea how to fix this one. My next error occurs directly below it, in the following lines:
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
    self.previewLayer = previewLayer
    self.view.layer.addSublayer(self.previewLayer)
    self.previewLayer.frame = self.view.layer.frame
    captureSession.startRunning()
This is flagged with "Initializer for conditional binding must have Optional type, not 'AVCaptureVideoPreviewLayer'".
If you can fix any of these, or even offer advice on how to, it will mean a lot to me and really make my year.
1- Use the full nested type names; DiscoverySession is an initializer on AVCaptureDevice:
let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.back)
2- Either bind the optional with if let captureDevice = captureDevice, or force-unwrap it:
let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice!)
3- AVCaptureVideoPreviewLayer's initializer doesn't return an optional, so replace
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
with
let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
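Putting the three fixes together, the two methods might look like this; a sketch against the iOS 11-era API, noting that the discovery session's devices property is an array, so you still have to pick one:

func prepareCamera() {
    captureSession.sessionPreset = .photo
    // DiscoverySession is an initializer, not an optional: take the first match.
    let discovery = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                     mediaType: .video,
                                                     position: .back)
    if let device = discovery.devices.first {
        captureDevice = device
        beginSession()
    }
}

func beginSession() {
    guard let captureDevice = captureDevice else { return } // unwrap the optional property
    do {
        let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
        captureSession.addInput(captureDeviceInput)
    } catch {
        print(error.localizedDescription)
    }
    // The initializer does not return an optional, so no `if let` here.
    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    self.previewLayer = previewLayer
    view.layer.addSublayer(previewLayer)
    previewLayer.frame = view.layer.frame
    captureSession.startRunning()
}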

'Unsupported type found - use -availableMetadataObjectTypes' issue

I am trying to create QR Reader. However, when I open the window with scanner, it crashes with error "Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVCaptureMetadataOutput setMetadataObjectTypes:] Unsupported type found - use -availableMetadataObjectTypes'"
This is my code:
import UIKit
import AVFoundation
import Alamofire
import SwiftyJSON

class CameraTwoViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    @IBOutlet weak var square: UIImageView!
    var video = AVCaptureVideoPreviewLayer()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.

        // Creating session
        let session = AVCaptureSession()

        // Define capture device
        let captureDevice = AVCaptureDevice.default(for: .video)
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice!)
        } catch {
            print("ERROR")
        }

        let output = AVCaptureMetadataOutput()
        session.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        output.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
        //output.metadataObjectTypes = [AVMetadataObject.availableMetadataObjectTypes.qr]

        video = AVCaptureVideoPreviewLayer(session: session)
        video.frame = view.layer.bounds
        view.layer.addSublayer(video)
        self.view.bringSubview(toFront: square)
        session.startRunning()
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        if metadataObjects != nil && metadataObjects.count != 0 {
            if let object = metadataObjects[0] as? AVMetadataMachineReadableCodeObject {
                if object.type == AVMetadataObject.ObjectType.qr {
                    let alert = UIAlertController(title: "Your code is:", message: object.stringValue, preferredStyle: .alert)
                    alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
                    alert.addAction(UIAlertAction(title: "Copy", style: .default, handler: { (nil) in
                        UIPasteboard.general.string = object.stringValue
                    }))
                    present(alert, animated: true, completion: nil)
                }
            }
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
Thank you in advance!
Try adding the input to the session before you add the output; metadataObjectTypes must be a subset of availableMetadataObjectTypes, and that list stays empty until an input is connected. Something like this:
func configureScanner() {
    guard let captureDevice = AVCaptureDevice.default(for: .video) else {
        return
    }

    var input: AVCaptureDeviceInput?
    do {
        input = try AVCaptureDeviceInput(device: captureDevice)
    } catch let error {
        print(error.localizedDescription)
    }

    guard let indeedInput = input else {
        return
    }

    captureSession = AVCaptureSession()
    captureSession!.addInput(indeedInput)

    let captureMetadataOutput = AVCaptureMetadataOutput()
    captureSession!.addOutput(captureMetadataOutput)
    captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
    captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
    ...
}
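As the exception message suggests, you can also guard the assignment itself so an unsupported type can never crash the app; a small defensive check using the same captureMetadataOutput as above:

// availableMetadataObjectTypes is empty until an input is connected,
// which is why adding the output before the input crashes.
if captureMetadataOutput.availableMetadataObjectTypes.contains(.qr) {
    captureMetadataOutput.metadataObjectTypes = [.qr]
} else {
    print("QR detection is not supported by the current input")
}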

Swift 4 QR reader working but no result

I have a Swift 4 QR code reader that is supposed to pop up an alert, but the alert never shows.
The session starts running properly, but captureOutput() does not seem to be triggered when a QR code is scanned.
My code:
import UIKit
import AVFoundation

class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    @IBOutlet weak var square: UIImageView!
    var video = AVCaptureVideoPreviewLayer()

    override func viewDidLoad() {
        super.viewDidLoad()

        // Creating session
        let session = AVCaptureSession()

        // Define capture device
        let captureDevice = AVCaptureDevice.default(for: AVMediaType.video)
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice!)
            session.addInput(input)
        } catch {
            print("ERROR")
        }

        let output = AVCaptureMetadataOutput()
        session.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        output.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]

        video = AVCaptureVideoPreviewLayer(session: session)
        video.frame = view.layer.bounds
        view.layer.addSublayer(video)
        self.view.bringSubview(toFront: square)
        session.startRunning()
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
        if metadataObjects != nil && metadataObjects.count != 0 {
            if let object = metadataObjects[0] as? AVMetadataMachineReadableCodeObject {
                if object.type == AVMetadataObject.ObjectType.qr {
                    let alert = UIAlertController(title: "QR Code", message: object.stringValue, preferredStyle: .alert)
                    alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
                    alert.addAction(UIAlertAction(title: "Copy", style: .default, handler: { (nil) in
                        UIPasteboard.general.string = object.stringValue
                    }))
                    present(alert, animated: true, completion: nil)
                }
            }
        }
    }
}
In Swift 4 (the iOS 11 SDK) the metadata callback was renamed, so your scanning check needs to live in the following delegate method:
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection)
Your code will then look like this:
import UIKit
import AVFoundation

class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    @IBOutlet weak var square: UIImageView!
    var video = AVCaptureVideoPreviewLayer()

    override func viewDidLoad() {
        super.viewDidLoad()

        // Creating session
        let session = AVCaptureSession()

        // Define capture device
        let captureDevice = AVCaptureDevice.default(for: AVMediaType.video)
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice!)
            session.addInput(input)
        } catch {
            print("ERROR")
        }

        let output = AVCaptureMetadataOutput()
        session.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        output.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]

        video = AVCaptureVideoPreviewLayer(session: session)
        video.frame = view.layer.bounds
        view.layer.addSublayer(video)
        self.view.bringSubview(toFront: square)
        session.startRunning()
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        if metadataObjects != nil && metadataObjects.count != 0 {
            if let object = metadataObjects[0] as? AVMetadataMachineReadableCodeObject {
                if object.type == AVMetadataObject.ObjectType.qr {
                    let alert = UIAlertController(title: "QR Code", message: object.stringValue, preferredStyle: .alert)
                    alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
                    alert.addAction(UIAlertAction(title: "Copy", style: .default, handler: { (nil) in
                        UIPasteboard.general.string = object.stringValue
                    }))
                    present(alert, animated: true, completion: nil)
                }
            }
        }
    }
}
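Because the old selector no longer matches anything AVFoundation calls, the compiler gives no error; the method is simply never invoked, which is why the session appears to run fine while producing no result.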

AVPlayer is not working when I try to use it with YTVimeoExtractor

When I try to play the video through AVPlayer, the video loads for some time (the loading spinner appears at the top of the player), then it suddenly stops and a crossed-out play icon is shown. I don't know what is wrong. I can get the video information, but I can't make the video play.
I will show what I have done. Could anyone answer my question? Help is much appreciated.
import UIKit
import AVKit
import AVFoundation

class ViewController: UIViewController {
    var playerVC: AVPlayerViewController!
    var playerItem: AVPlayerItem!
    var player: AVPlayer!
    var playerLayer: AVPlayerLayer!

    @IBOutlet weak var videoURL: UITextField!
    @IBOutlet weak var videoTitle: UILabel!

    @IBAction func playVideo(sender: AnyObject) {
        YTVimeoExtractor.sharedExtractor().fetchVideoWithVimeoURL(self.videoURL.text!, withReferer: nil, completionHandler: { (video, error) -> Void in
            if video != nil {
                // https://vimeo.com/165891648
                self.videoTitle.text = video?.title
                print("hello: \(self.videoTitle.text)")
                let url = NSURL(string: self.videoURL.text!)
                // let url = NSURL.init(fileURLWithPath: self.videoURL.text!)
                self.playerItem = AVPlayerItem.init(URL: url!)
                self.player = AVPlayer.init(playerItem: self.playerItem)
                self.playerVC = AVPlayerViewController.init()
                self.playerVC.player = self.player
                self.player.currentItem!.playbackLikelyToKeepUp
                self.presentViewController(self.playerVC, animated: true) { () -> Void in
                    self.playerVC.player?.play()
                }
            } else {
                let alert = UIAlertController(title: error!.localizedDescription, message: error!.localizedFailureReason, preferredStyle: UIAlertControllerStyle.Alert)
                alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Default, handler: nil))
                self.presentViewController(alert, animated: true, completion: nil)
            }
        })
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
    }

    override func viewDidAppear(animated: Bool) {
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
This is what I get on the simulator.
I used https://github.com/lilfaf/YTVimeoExtractor to build this sample project. You can try it out and let me know.
I had the same issue and fixed it this way. The key point is that the Vimeo page URL itself is not playable; you have to hand AVPlayer one of the stream URLs that the extractor returns:
@IBAction func btnDownload_touchUpInside(_ sender: UIButton) {
    YTVimeoExtractor.shared().fetchVideo(withVimeoURL: self.videoURL.text!, withReferer: nil, completionHandler: { (video, error) -> Void in
        if video != nil {
            self.videoTitle.text = video?.title
            if let streamUrls = video?.streamURLs {
                var streamURL: String?
                var streams: [String: String] = [:]
                for (key, value) in streamUrls {
                    streams["\(key)"] = "\(value)"
                    print("\(key) || \(value)")
                }
                if let large = streams["720"] {
                    streamURL = large
                } else if let high = streams["480"] {
                    streamURL = high
                } else if let medium = streams["360"] {
                    streamURL = medium
                } else if let low = streams["270"] {
                    streamURL = low
                }
                if let url = streamURL {
                    Alamofire.download(url, to: { (temporaryURL, response) -> (destinationURL: URL, options: DownloadRequest.DownloadOptions) in
                        if let directoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as? NSURL {
                            let fileName = response.suggestedFilename!
                            let finalPath = directoryURL.appendingPathComponent(fileName)
                            self.downloadedVideoPath = finalPath?.absoluteString
                            return (finalPath!, DownloadRequest.DownloadOptions(rawValue: 2))
                        }
                    }).downloadProgress(closure: { (progress) in
                        print("Progress: \(progress.fractionCompleted)")
                    })
                }
            }
        }
    })
}

@IBAction func btnPlayOffLine_touchUpInside(_ sender: UIButton) {
    YTVimeoExtractor.shared().fetchVideo(withVimeoURL: self.videoURL.text!, withReferer: nil, completionHandler: { (video, error) -> Void in
        if video != nil {
            let videoURL = NSURL(string: self.downloadedVideoPath!)
            let player = AVPlayer(url: videoURL! as URL)
            let playerViewController = AVPlayerViewController()
            playerViewController.player = player
            self.present(playerViewController, animated: true) {
                playerViewController.player!.play()
            }
        }
    })
}

@IBAction func btnPlayOnLine_touchUpInside(_ sender: UIButton) {
    YTVimeoExtractor.shared().fetchVideo(withVimeoURL: self.videoURL.text!, withReferer: nil, completionHandler: { (video, error) -> Void in
        if video != nil {
            self.videoTitle.text = video?.title
            if let streamUrls = video?.streamURLs {
                var streamURL: String?
                var streams: [String: String] = [:]
                for (key, value) in streamUrls {
                    streams["\(key)"] = "\(value)"
                    print("\(key) || \(value)")
                }
                if let large = streams["720"] {
                    streamURL = large
                } else if let high = streams["480"] {
                    streamURL = high
                } else if let medium = streams["360"] {
                    streamURL = medium
                } else if let low = streams["270"] {
                    streamURL = low
                }
                if let url = streamURL {
                    let videoURL = NSURL(string: url)
                    let player = AVPlayer(url: videoURL! as URL)
                    let playerViewController = AVPlayerViewController()
                    playerViewController.player = player
                    self.present(playerViewController, animated: true) {
                        playerViewController.player!.play()
                    }
                }
            }
        }
    })
}