Use of unresolved identifier "scanQRCode" - swift

I created a tab bar project in Objective-C in Xcode, and now I want to import a barcode scanner into it. Just when I told myself "Yes, it will work", I got an error. Can you help me figure it out?
import UIKit
import AVFoundation

class Barcode: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    @IBOutlet var videoPreview: UIView!
    var stringURL = String()

    enum error: Error {
        case noCameraAvailable
        case videoInputInitFail
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        do {
            try scanQRCode() /* I get the error right here: "Use of unresolved identifier" */
        } catch {
            print("Tarama Gerçekleşemedi!") // "Scan could not be performed!"
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.

        func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
            if metadataObjects.count > 0 {
                let machineReadableCode = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
                if machineReadableCode.type == AVMetadataObjectTypeQRCode {
                    stringURL = machineReadableCode.stringValue!
                    performSegue(withIdentifier: "openLink", sender: self)
                }
            }
        }

        func scanQRCode() throws {
            let avCaptureSession = AVCaptureSession()
            guard let avCaptureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) else {
                print("Kamera Bulunamadı!") // "Camera not found!"
                throw error.noCameraAvailable
            }
            guard let avCaptureInput = try? AVCaptureDeviceInput(device: avCaptureDevice) else {
                print("Kameraya Erişilemedi!") // "Could not access the camera!"
                throw error.videoInputInitFail
            }
            let avCaptureMetadataOutput = AVCaptureMetadataOutput()
            avCaptureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            avCaptureSession.addInput(avCaptureInput)
            avCaptureSession.addOutput(avCaptureMetadataOutput)
            avCaptureMetadataOutput.metadataObjectTypes = [AVMetadataObjectTypeQRCode]
            let avCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: avCaptureSession)
            avCaptureVideoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
            avCaptureVideoPreviewLayer?.frame = videoPreview.bounds
            self.videoPreview.layer.addSublayer(avCaptureVideoPreviewLayer!)
            avCaptureSession.startRunning()
        }

        func prepare(for segue: UIStoryboardSegue, sender: Any?) {
            if segue.identifier == "openLink" {
                let destination = segue.destination as! WebViewController
                destination.url = URL(string: stringURL)
            }
        }
    }
}
My main ViewController classes are in Obj-C, but I created Barcode.swift for the barcode tab. Could this error be related to that?

You have declared that function inside another function, didReceiveMemoryWarning, which you shouldn't do.
Move scanQRCode and all the other functions you declared inside didReceiveMemoryWarning out of it:
override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
}

func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    //...
}

func scanQRCode() throws {
    //...
}

func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    //...
}
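Once those methods are back at class scope, the call in viewDidLoad will resolve. For reference only, here is a minimal sketch of scanQRCode written against the current AVFoundation API (AVCaptureDevice.default(for:), AVMetadataObject.ObjectType.qr, .resizeAspectFill), since the constants in the question are from an older SDK; the outlet, error enum and overall behaviour are taken from the question:

func scanQRCode() throws {
    let avCaptureSession = AVCaptureSession()

    // AVCaptureDevice.default(for:) is the current replacement for defaultDevice(withMediaType:)
    guard let avCaptureDevice = AVCaptureDevice.default(for: .video) else {
        throw error.noCameraAvailable
    }
    guard let avCaptureInput = try? AVCaptureDeviceInput(device: avCaptureDevice) else {
        throw error.videoInputInitFail
    }

    let avCaptureMetadataOutput = AVCaptureMetadataOutput()
    avCaptureSession.addInput(avCaptureInput)
    avCaptureSession.addOutput(avCaptureMetadataOutput)

    // metadataObjectTypes must be set after the output has been added to the session
    avCaptureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
    avCaptureMetadataOutput.metadataObjectTypes = [.qr]

    let previewLayer = AVCaptureVideoPreviewLayer(session: avCaptureSession)
    previewLayer.videoGravity = .resizeAspectFill
    previewLayer.frame = videoPreview.bounds
    videoPreview.layer.addSublayer(previewLayer)

    avCaptureSession.startRunning()
}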

How to make a UIViewController class reusable to pass data back to the view controller that calls it

I was using the code from the following site
https://www.hackingwithswift.com/example-code/media/how-to-scan-a-qr-code
The code works perfectly and can be viewed by following the link above.
It is code that captures a QR code/barcode from the camera and converts it to a string.
The part of the code that shows the string is:
func found(code: String) {
    print(code)
}
After the code string is printed, the code calls "dismiss" and returns to the previous UIViewController.
I want to take the "code" string and pass the data back to the previous UIViewController.
The only way I am able to do that now is with the following code:
func found(code: String) {
    print("code: \(code)")
    ResenhaEquideoIdentificaAnimal1Controller.shared.microchipAnimalTextField.text = code
}
But this code only works if the code is called by the "ResenhaEquideoIdentificaAnimal1Controller" class.
I use the following code to call the new UIViewController inside the "ResenhaEquideoIdentificaAnimal1Controller" class using a UIButton.
let myScannerViewController = MyScannerViewController()
present(myScannerViewController, animated: true, completion: nil)
How can I make this class reusable, so that I can present the "MyScannerViewController" class
and send data back to the view controller that calls it?
You want to use a "delegate pattern", that is, when the code is found or something goes wrong, you delegate the functionality to some other party to deal with it.
For example, you could modify the existing example to add support for a simple delegate...
import AVFoundation
import UIKit

protocol ScannerDelegate: AnyObject {
    func scanner(_ controller: ScannerViewController, didDiscoverCode code: String)
    func failedToScanner(_ controller: ScannerViewController)
}

class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    var captureSession: AVCaptureSession!
    var previewLayer: AVCaptureVideoPreviewLayer!

    weak var scannerDelegate: ScannerDelegate?

    override func viewDidLoad() {
        super.viewDidLoad()

        view.backgroundColor = UIColor.black
        captureSession = AVCaptureSession()

        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
        let videoInput: AVCaptureDeviceInput

        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            return
        }

        if (captureSession.canAddInput(videoInput)) {
            captureSession.addInput(videoInput)
        } else {
            failed()
            return
        }

        let metadataOutput = AVCaptureMetadataOutput()

        if (captureSession.canAddOutput(metadataOutput)) {
            captureSession.addOutput(metadataOutput)

            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            metadataOutput.metadataObjectTypes = [.qr]
        } else {
            failed()
            return
        }

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(previewLayer)

        captureSession.startRunning()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        if (captureSession?.isRunning == false) {
            captureSession.startRunning()
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        if (captureSession?.isRunning == true) {
            captureSession.stopRunning()
        }
    }

    private func failed() {
        captureSession = nil
        scannerDelegate?.failedToScanner(self)
    }

    private func didFind(code: String) {
        scannerDelegate?.scanner(self, didDiscoverCode: code)
    }

    override var prefersStatusBarHidden: Bool {
        return true
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }

    // MARK: AVCaptureMetadataOutputObjectsDelegate

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        captureSession.stopRunning()

        if let metadataObject = metadataObjects.first {
            guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
            guard let stringValue = readableObject.stringValue else { return }
            AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
            didFind(code: stringValue)
        }
    }
}
When you want to scan something, your calling view controller could adopt the protocol...
extension ViewController: ScannerDelegate {
    func failedToScanner(_ controller: ScannerViewController) {
        controller.dismiss(animated: true) {
            let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
            ac.addAction(UIAlertAction(title: "OK", style: .default))
            self.present(ac, animated: true)
        }
    }

    func scanner(_ controller: ScannerViewController, didDiscoverCode code: String) {
        codeLabel.text = code
        controller.dismiss(animated: true)
    }
}
and when you wanted to present the scanner view controller, you would simply set the view controller as the delegate...
let controller = ScannerViewController()
controller.scannerDelegate = self
present(controller, animated: true)
The great thing about this is that you could easily reject a code you weren't interested in, simply by modifying the delegate workflow.
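As a rough sketch of that idea, the delegate could be asked whether a scanned code is acceptable before the scanner stops; the scanner(_:shouldAccept:) method below is an illustrative addition, not part of the original example:

protocol ScannerDelegate: AnyObject {
    // Illustrative extra requirement: return true to accept the scanned code
    func scanner(_ controller: ScannerViewController, shouldAccept code: String) -> Bool
    func scanner(_ controller: ScannerViewController, didDiscoverCode code: String)
    func failedToScanner(_ controller: ScannerViewController)
}

// Inside metadataOutput(_:didOutput:from:) of ScannerViewController
if let delegate = scannerDelegate, delegate.scanner(self, shouldAccept: stringValue) {
    captureSession.stopRunning()
    AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
    didFind(code: stringValue)
}
// otherwise leave the session running and wait for the next code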

ORSSerialPort with Arduino

I've been trying for a long time to build an Xcode interface that communicates with my Arduino Mega, but the whole thing didn't work as intended. I did everything with ORSSerialPort.
In the Xcode project I wrote this in the Swift file ViewController.swift:
import Cocoa
import ORSSerial

class ViewController: NSViewController, ORSSerialPortDelegate {

    var serialPort = ORSSerialPort(path: "/dev/cu.usbmodem142101")

    func SendString(data: String) {
        let stringData = Data(data.utf8)
        serialPort?.send(stringData)
    }

    func openPort() {
        serialPort?.baudRate = 9600
        serialPort?.delegate = self
        serialPort?.parity = .none
        serialPort?.numberOfStopBits = 1
        serialPort?.open()
        print("serialport is open")
    }

    func closePort() {
        serialPort?.delegate = nil
        serialPort?.close()
        print("serialport is close")
    }

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override var representedObject: Any? {
        didSet {
        }
    }

    @IBAction func onButton(_ sender: Any) {
        openPort()
    }

    @IBAction func OffButton(_ sender: Any) {
        closePort()
    }

    @IBAction func SendButton(_ sender: Any) {
        SendString(data: "stringdata blablabla")
    }

    func serialPortWasOpened(_ serialPort: ORSSerialPort) {
        print("serialPort to \(serialPort) is run")
    }

    func serialPortWasRemovedFromSystem(_ serialPort: ORSSerialPort) {
        self.serialPort = nil
    }
}
and this is the code I have loaded on the Arduino Mega:
String angel;

void setup() {
    Serial.begin(9600);
}

void loop() {
    angel = Serial.readString();
    Serial.println(angel);
    delay(350);
}
unfortunately it doesn't work and I don't know why.
Your question doesn't provide any detail about what part(s) don't work, but there's one definite problem.
Your Arduino program looks like it echoes everything it receives on the serial port back on the same port. In order to see that on the computer, you'll have to implement the serialPort(_:didReceive:) method in your view controller. Something like this:
func serialPort(_ serialPort: ORSSerialPort, didReceive data: Data) {
    guard let string = String(data: data, encoding: .ascii) else { return }
    print("Received: \(string)")
}
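One caveat worth hedging: serial data often arrives in fragments, so a single didReceive call may contain only part of a line. A minimal sketch that accumulates bytes until a newline arrives (the receiveBuffer property and newline splitting are assumptions for illustration, not part of ORSSerialPort) might look like this:

var receiveBuffer = Data() // assumed property on the view controller

func serialPort(_ serialPort: ORSSerialPort, didReceive data: Data) {
    receiveBuffer.append(data)
    // Serial.println on the Arduino terminates each line with "\r\n"
    while let newlineRange = receiveBuffer.range(of: Data("\n".utf8)) {
        let lineData = receiveBuffer.subdata(in: receiveBuffer.startIndex..<newlineRange.lowerBound)
        receiveBuffer.removeSubrange(receiveBuffer.startIndex..<newlineRange.upperBound)
        if let line = String(data: lineData, encoding: .ascii) {
            print("Received line: \(line.trimmingCharacters(in: .whitespacesAndNewlines))")
        }
    }
}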

How do I pass a scanned barcode ID from first view controller to second View Controller's UILabel?

This is the barcode scanning tutorial I used in my program, so that you have a lot more context when you read my code: Link
Here is what my program does so far: Essentially, when I scan an item's barcode with my phone, a UIAlert pops up with the barcode ID displayed and a button prompting the user to open the "Results" page. This is all fine and good, but how do I pass that same scanned barcode ID into a label on the Results page? I have been stuck on this for 2 days now, even though it seems like such an easy task.
Any help is much appreciated <3
Here is my relevant code:
ProductCatalog.plist ->
Link to Image
Scanner_ViewController.swift (first View Controller) ->
import UIKit
import AVFoundation

class Scanner_ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate, ScannerDelegate
{
    private var scanner: Scanner?

    override func viewDidLoad()
    {
        super.viewDidLoad()
        self.scanner = Scanner(withDelegate: self)
        guard let scanner = self.scanner else
        {
            return
        }
        scanner.requestCaptureSessionStartRunning()
    }

    override func didReceiveMemoryWarning()
    {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // MARK: - AVFoundation delegate methods
    public func metadataOutput(_ output: AVCaptureMetadataOutput,
                               didOutput metadataObjects: [AVMetadataObject],
                               from connection: AVCaptureConnection)
    {
        guard let scanner = self.scanner else
        {
            return
        }
        scanner.metadataOutput(output,
                               didOutput: metadataObjects,
                               from: connection)
    }

    // MARK: - Scanner delegate methods
    func cameraView() -> UIView
    {
        return self.view
    }

    func delegateViewController() -> UIViewController
    {
        return self
    }

    func scanCompleted(withCode code: String)
    {
        print(code)
        showAlert_Success(withTitle: (code))
    }

    private func showAlert_Success(withTitle title: String)
    {
        let alertController = UIAlertController(title: title, message: "Product has been successfully scanned", preferredStyle: .alert)
        // programmatically segue to the next view controller when the UIAlert pops up
        alertController.addAction(UIAlertAction(title: "Get Results", style: .default, handler: { action in self.performSegue(withIdentifier: "toAnalysisPage", sender: self) }))
        present(alertController, animated: true)
    }
}
Scanner.Swift (accompanies Scanner_ViewController.swift)->
import Foundation
import UIKit
import AVFoundation

protocol ScannerDelegate: class
{
    func cameraView() -> UIView
    func delegateViewController() -> UIViewController
    func scanCompleted(withCode code: String)
}

class Scanner: NSObject
{
    public weak var delegate: ScannerDelegate?
    private var captureSession: AVCaptureSession?

    init(withDelegate delegate: ScannerDelegate)
    {
        self.delegate = delegate
        super.init()
        self.scannerSetup()
    }

    private func scannerSetup()
    {
        guard let captureSession = self.createCaptureSession() else
        {
            return
        }
        self.captureSession = captureSession

        guard let delegate = self.delegate else
        {
            return
        }
        let cameraView = delegate.cameraView()
        let previewLayer = self.createPreviewLayer(withCaptureSession: captureSession,
                                                   view: cameraView)
        cameraView.layer.addSublayer(previewLayer)
    }

    private func createCaptureSession() -> AVCaptureSession?
    {
        do
        {
            let captureSession = AVCaptureSession()
            guard let captureDevice = AVCaptureDevice.default(for: .video) else
            {
                return nil
            }
            let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
            let metaDataOutput = AVCaptureMetadataOutput()

            // add device input
            if captureSession.canAddInput(deviceInput) && captureSession.canAddOutput(metaDataOutput)
            {
                captureSession.addInput(deviceInput)
                captureSession.addOutput(metaDataOutput)

                guard let delegate = self.delegate,
                      let viewController = delegate.delegateViewController() as? AVCaptureMetadataOutputObjectsDelegate else
                {
                    return nil
                }
                metaDataOutput.setMetadataObjectsDelegate(viewController,
                                                          queue: DispatchQueue.main)
                metaDataOutput.metadataObjectTypes = self.metaObjectTypes()
                return captureSession
            }
        }
        catch
        {
            // handle error
        }
        return nil
    }

    private func createPreviewLayer(withCaptureSession captureSession: AVCaptureSession,
                                    view: UIView) -> AVCaptureVideoPreviewLayer
    {
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        return previewLayer
    }

    private func metaObjectTypes() -> [AVMetadataObject.ObjectType]
    {
        return [.qr,
                .code128,
                .code39,
                .code39Mod43,
                .code93,
                .ean13,
                .ean8,
                .interleaved2of5,
                .itf14,
                .pdf417,
                .upce
        ]
    }

    public func metadataOutput(_ output: AVCaptureMetadataOutput,
                               didOutput metadataObjects: [AVMetadataObject],
                               from connection: AVCaptureConnection)
    {
        self.requestCaptureSessionStopRunning()

        guard let metadataObject = metadataObjects.first,
              let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject,
              let scannedValue = readableObject.stringValue,
              let delegate = self.delegate else
        {
            return
        }
        delegate.scanCompleted(withCode: scannedValue)
    }

    public func requestCaptureSessionStartRunning()
    {
        self.toggleCaptureSessionRunningState()
    }

    public func requestCaptureSessionStopRunning()
    {
        self.toggleCaptureSessionRunningState()
    }

    private func toggleCaptureSessionRunningState()
    {
        guard let captureSession = self.captureSession else
        {
            return
        }

        if !captureSession.isRunning
        {
            captureSession.startRunning()
        }
        else
        {
            captureSession.stopRunning()
        }
    }
}
Analysis_ViewController.swift (second view controller) ->
Right now, the forKey: has been hard-coded to item ID 8710908501708 because I have no idea how to actually pass camera-scanned IDs into the second view controller :/
import UIKit

class Analysis_ViewController: UIViewController
{
    @IBOutlet weak var productTitle: UILabel!

    func getData()
    {
        let path = Bundle.main.path(forResource: "ProductCatalog", ofType: "plist")
        let dict: NSDictionary = NSDictionary(contentsOfFile: path!)!
        if (dict.object(forKey: "8710908501708" as Any) != nil)
        {
            if let levelDict: [String: Any] = dict.object(forKey: "8710908501708" as Any) as? [String: Any]
            {
                // use a for loop to iterate through all the keys and values inside the "Levels" dictionary
                for (key, value) in levelDict
                {
                    // if we find a key named whatever we care about, we can print out the value
                    if (key == "name")
                    {
                        productTitle.text = (value as! String)
                    }
                }
            }
        }
    }

    // listing the better options that are safer in comparison to the scanned product image
    override func viewDidLoad()
    {
        super.viewDidLoad()
        getData()
    }
}
Do you have a variable to hold the scanned ID in your view controllers? If not, you can add var itemID: String? to both Scanner_ViewController and Analysis_ViewController.
Then in your func where you get the scanned code, you can set it to the variable.
func scanCompleted(withCode code: String) {
    print(code)
    itemID = code // Saves the scanned code to your var
    showAlert_Success(withTitle: (code))
}
For passing data to another view controller via segue, you might want to look into this UIViewController method for segues: documentation here. This answer also might help.
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    if segue.identifier == "toAnalysisPage" {
        if let viewController = segue.destination as? Analysis_ViewController {
            viewController.itemID = itemID
        }
    }
}
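Then, on the receiving side, Analysis_ViewController can look up the passed value instead of the hard-coded ID. A minimal sketch, assuming itemID has been declared there as suggested above:

var itemID: String?

func getData()
{
    guard let itemID = itemID,
          let path = Bundle.main.path(forResource: "ProductCatalog", ofType: "plist"),
          let dict = NSDictionary(contentsOfFile: path) else
    {
        return
    }
    // use the scanned ID as the lookup key instead of a hard-coded string
    if let levelDict = dict.object(forKey: itemID) as? [String: Any],
       let name = levelDict["name"] as? String
    {
        productTitle.text = name
    }
}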

Live stream using AVPlayer not playing in iOS 11

I am trying to stream music from a remote URL. I am trying to run this in iOS 11, but it does not play the music.
ViewController
var session = AVAudioSession.sharedInstance()
var LQPlayer: AVPlayer?
let LOW_URL = URL(string: "http://someLInk.pls")! // not an original url provided at this time.

class ViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
        self.avPlayerSetup()
    }

    func avPlayerSetup() {
        do {
            try session.setCategory(AVAudioSessionCategoryPlayback)
            try session.overrideOutputAudioPort(.none)
            try session.setActive(true)
        } catch {
            print("AVPlayer setup error \(error.localizedDescription)")
        }
    }

    func initPlayer() {
        LQPlayer = AVPlayer(url: LOW_URL)
        print("player allocated")
    }

    func deAllocPlayer() {
        LQPlayer = nil
        print("player deallocated")
    }

    @IBAction func playBtn(_ sender: Any) {
        initPlayer()
        LQPlayer?.play()
    }

    @IBAction func pauseBtn(_ sender: Any) {
        LQPlayer?.pause()
        deAllocPlayer()
    }
}
I set Allow Arbitrary Loads to YES in Info.plist.
In the code above the URL I've given is a dummy; the actual URL works fine.
Working Code with Live Video Stream
@IBOutlet weak var player_View: UIView!

var LQPlayer: AVPlayer?
let LOW_URL = URL(string: "http://www.streambox.fr/playlists/test_001/stream.m3u8")!

override func viewDidLoad() {
    super.viewDidLoad()
    self.avPlayerSetup()
    LQPlayer = AVPlayer.init(url: LOW_URL)
    let avPlayerView = AVPlayerViewController()
    avPlayerView.view.frame = self.player_View.bounds
    avPlayerView.player = LQPlayer
    self.player_View.addSubview(avPlayerView.view)
}

func avPlayerSetup() {
    do {
        try audioSession.setCategory(AVAudioSessionCategoryPlayback)
        try audioSession.overrideOutputAudioPort(AVAudioSessionPortOverride.speaker)
        try audioSession.setActive(true)
    } catch {
        print("AVPlayer setup error \(error.localizedDescription)")
    }
}

func initPlayer() {
    LQPlayer = AVPlayer(url: LOW_URL)
    print("player allocated")
}

func deAllocPlayer() {
    LQPlayer = nil
    print("player deallocated")
}

@IBAction func playBtn(_ sender: Any) {
    // initPlayer()
    LQPlayer?.play()
}

@IBAction func pauseBtn(_ sender: Any) {
    LQPlayer?.pause()
    deAllocPlayer()
}

Swift: AudioPlayerDidFinished will not be called

My audioPlayerDidFinishPlaying will not be called after the audio has finished. I know it has something to do with my delegate, but I can't fix it on my own.
Can somebody give me some tips? I Googled a lot and found other questions here with the same issue, but the suggestions didn't work for me.
Thanks
import UIKit
import Parse
import AVFoundation

class ViewControllerMies: UIViewController, AVAudioPlayerDelegate {

    var timer = NSTimer()
    var player: AVAudioPlayer = AVAudioPlayer()
    var currentStateAudio = ""
    var oldAudio = String()

    func startTimer() {
        if player.playing == false {
            print("Tijd voor de Spice Girls") // "Time for the Spice Girls"
        }

        let query = PFQuery(className: "CurrentState")
        query.getObjectInBackgroundWithId("9r61TRaRqu") {
            (objects: PFObject?, error: NSError?) -> Void in
            if error == nil && objects != nil {
                self.currentStateAudio = objects!.objectForKey("currentState") as! String
                print(self.currentStateAudio)
            } else {
                print(error)
            }
        }

        if (oldAudio != self.currentStateAudio)
        {
            let audioPath = NSBundle.mainBundle().pathForResource(self.currentStateAudio, ofType: "mp3")!
            do {
                try player = AVAudioPlayer(contentsOfURL: NSURL(fileURLWithPath: audioPath))
            } catch {
                // Process error here
            }
            player.play()
            oldAudio = self.currentStateAudio
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        timer = NSTimer.scheduledTimerWithTimeInterval(1, target: self, selector: Selector("startTimer"), userInfo: nil, repeats: true)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool)
    {
        print("Finished Playing")
    }

    /*
    // MARK: - Navigation

    // In a storyboard-based application, you will often want to do a little preparation before navigation
    override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
        // Get the new view controller using segue.destinationViewController.
        // Pass the selected object to the new view controller.
    }
    */
}
You're not setting yourself as the player's delegate.
Before calling play(), do player.delegate = self.
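In the question's startTimer(), that is a one-line change right after the player is created; a minimal sketch in the question's Swift 2 style:

do {
    try player = AVAudioPlayer(contentsOfURL: NSURL(fileURLWithPath: audioPath))
    player.delegate = self // without this, audioPlayerDidFinishPlaying is never called
} catch {
    // Process error here
}
player.play()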
If you declare the delegate method as private, it will not be called either. Use this to resolve the issue:
// MARK: - This is the correct delegate method and works fine (use this)
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
    audioRecorder = nil
}

// MARK: - Instead of this (this will not be called)
private func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
    audioRecorder = nil
}