Calling Game Center Leaderboard from SKScene - swift

Hello, I'm trying to call a function that shows the Game Center leaderboard from an SKScene, but I've been unsuccessful. Here is how I call the function from the SKScene:
class Menu: SKScene {
    func score(sender: UIButton!) {
        GamecenterUtils.sharedGamecenterUtils.showLeaderboardOnViewController(GamecenterUtils(), leaderboardID: "myleaderID")
    }
}
I get a compiler error: 'GamecenterUtils' is not convertible to UIViewController.
I pass GamecenterUtils() because it is my GKGameCenterControllerDelegate.
Here is the GamecenterUtils class that handles everything for Game Center:
import UIKit
import GameKit

private let _sharedGamecenterUtils = GamecenterUtils()

class GamecenterUtils: NSObject, GKGameCenterControllerDelegate {
    class var sharedGamecenterUtils: GamecenterUtils {
        return _sharedGamecenterUtils
    }

    override init() {
    }

    func authenticateLocalUserOnViewController(viewController: UIViewController) {
        var localPlayer: GKLocalPlayer = GKLocalPlayer.localPlayer()
        if (localPlayer.authenticated == false) {
            localPlayer.authenticateHandler = { (authViewController, error) -> Void in
                if (authViewController != nil) {
                    viewController.presentViewController(authViewController, animated: false, completion: nil)
                }
            }
        }
        else {
            println("Already authenticated")
        }
    }

    func reportScore(score: Int, leaderboardID: NSString) {
        var scoreReporter: GKScore = GKScore(leaderboardIdentifier: "catchthategg01")
        scoreReporter.value = Int64(score)
        scoreReporter.context = 0
        GKScore.reportScores([scoreReporter], withCompletionHandler: { (error) -> Void in
            if let reportError = error {
                println("Unable to report score!\nError: \(reportError)")
            }
            else {
                println("Score reported successfully!")
            }
        })
    }

    func showLeaderboardOnViewController(viewController: UIViewController?, leaderboardID: NSString) {
        if let containerController = viewController {
            var gamecenterController: GKGameCenterViewController = GKGameCenterViewController()
            gamecenterController.gameCenterDelegate = self
            gamecenterController.viewState = GKGameCenterViewControllerState.Leaderboards
            gamecenterController.leaderboardIdentifier = "catchthategg01"
            containerController.presentViewController(gamecenterController, animated: false, completion: nil)
        }
    }

    func gameCenterViewControllerDidFinish(_gameCenterViewController: GKGameCenterViewController!) {
        _gameCenterViewController.dismissViewControllerAnimated(false, completion: nil)
    }
}
Thank you for your help.
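One likely fix, sketched here (untested against this exact project): showLeaderboardOnViewController expects the view controller that will present the Game Center UI, not the delegate object, and GamecenterUtils already acts as the GKGameCenterControllerDelegate internally. From an SKScene, a presenting controller can usually be reached through the scene's view:

class Menu: SKScene {
    func score(sender: UIButton!) {
        // The SKView presenting this scene sits in the view controller
        // hierarchy, so its window's rootViewController can present.
        if let presenter = self.view?.window?.rootViewController {
            GamecenterUtils.sharedGamecenterUtils.showLeaderboardOnViewController(presenter, leaderboardID: "myleaderID")
        }
    }
}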

Related

Call delegate function from another class

I created a custom class that uses a delegate protocol. Is it possible to trigger the didPlaneUpdate function in ViewController1 by calling the addToPlane() function from ViewController2? Below is my code:
// CustomClass.swift:
protocol PlaneDelegate: class {
    func didPlaneUpdate()
}

class Plane {
    static let shared = Plane()
    weak var delegate: PlaneDelegate?

    public init() { }

    public func addToPlane() {
        updatePlane()
    }

    public func updatePlane() {
        delegate?.didPlaneUpdate()
    }
}
// ViewController1.swift:
class ViewControllerPlane: UIViewController, PlaneDelegate {
    var plane = Plane()

    override func viewDidLoad() {
        super.viewDidLoad()
        plane.delegate = self
    }

    func didPlaneUpdate() {
        print("test updated")
    }
}
// ViewController2.swift:
var plane = Plane()
plane.addToPlane()
or
// ViewController2.swift:
Plane.shared.addToPlane()
It doesn't work.
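A likely cause, judging from the code above: three different Plane instances are involved. ViewController1 sets the delegate on its own plane instance, ViewController2 calls addToPlane() on yet another instance, and Plane.shared is a third object whose delegate is never set, so delegate?.didPlaneUpdate() silently does nothing. A minimal sketch that routes everything through the existing shared singleton:

// ViewController1.swift — set the delegate on the shared instance,
// not on a freshly created Plane()
class ViewControllerPlane: UIViewController, PlaneDelegate {
    override func viewDidLoad() {
        super.viewDidLoad()
        Plane.shared.delegate = self
    }

    func didPlaneUpdate() {
        print("test updated")
    }
}

// ViewController2.swift — this now reaches the delegate set above
Plane.shared.addToPlane()

Making the initializer private would also prevent accidental extra instances, since public init() currently lets any caller create a new, delegate-less Plane.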

AVCaptureMetadataObjectDelegate not receiving callback

I am making a QR scanner. My code works when all of it is written in one place inside the ViewController, but when I modularised it, I stopped getting the callback from AVCaptureMetadataOutputObjectsDelegate.
import Foundation
import UIKit
import AVFoundation

class CameraSource : NSObject {
    private var session : AVCaptureSession?
    private var inputDevice : AVCaptureDeviceInput?
    private var videoPreviewLayer : AVCaptureVideoPreviewLayer?
    private var captureMetadataOutput : AVCaptureMetadataOutput?

    func setCaptureMetadataOutput() {
        self.captureMetadataOutput = nil
        self.captureMetadataOutput = AVCaptureMetadataOutput()
    }

    func getCaptureMetadataOutput() -> AVCaptureMetadataOutput? {
        return self.captureMetadataOutput
    }

    func setInputDevice(inputDevice : AVCaptureDeviceInput?) {
        self.inputDevice = inputDevice
    }

    func getInputDevice() -> AVCaptureDeviceInput? {
        return self.inputDevice
    }

    func setSession(session : AVCaptureSession?) {
        self.session = session
    }

    func getSession() -> AVCaptureSession? {
        return self.session
    }

    func setMetadataObjects(metaObjects : [AVMetadataObject.ObjectType], delegate : AVCaptureMetadataOutputObjectsDelegate) {
        assert(self.captureMetadataOutput != nil)
        self.captureMetadataOutput!.setMetadataObjectsDelegate(delegate, queue: DispatchQueue.main)
        self.captureMetadataOutput!.metadataObjectTypes = metaObjects
    }

    func initViewoPreviewLayer(videoGravity : AVLayerVideoGravity, orientation : AVCaptureVideoOrientation) {
        assert(session != nil)
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: session!)
        videoPreviewLayer!.videoGravity = videoGravity
        videoPreviewLayer!.connection!.videoOrientation = orientation
    }

    func addVideoLayerToImageView(imageView : UIImageView) {
        assert(self.videoPreviewLayer != nil)
        imageView.layer.addSublayer(self.videoPreviewLayer!)
        self.videoPreviewLayer!.frame = imageView.bounds
    }

    func startSession() {
        assert(session != nil)
        self.session!.startRunning()
    }

    /*==========================================================================
        STATIC FUNCTIONS
    ==========================================================================*/

    static func getBackCamera() -> AVCaptureDevice {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .back)!
    }

    static func getFrontCamera() -> AVCaptureDevice {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .front)!
    }

    static func isCameraAvailable() -> Bool {
        if #available(iOS 10.0, *) {
            let count : Int = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                               mediaType: AVMediaType.video,
                                                               position: .unspecified).devices.count
            if count > 0 { return true }
        }
        else {
            let count = AVCaptureDevice.devices(for: AVMediaType.video).count
            if count > 0 { return true }
        }
        return false
    }

    /*==========================================================================
        CAMERA BUILDER CLASS
    ==========================================================================*/

    class Builder {
        var cameraSource : CameraSource

        init() {
            cameraSource = CameraSource()
        }

        func createSession() -> Builder {
            if (cameraSource.getSession() != nil) {
                cameraSource.setSession(session: nil)
            }
            cameraSource.setSession(session: AVCaptureSession())
            return self
        }

        func setSessionPreset(preset : AVCaptureSession.Preset) -> Builder {
            assert(cameraSource.getSession() != nil)
            cameraSource.getSession()!.sessionPreset = preset
            return self
        }

        func attachInputDevice(camera : AVCaptureDevice) throws -> Builder {
            try self.prepareInputDevice(camera: camera)
            try self.addInputToSession()
            assert(cameraSource.inputDevice != nil)
            return self
        }

        func addOutputToSessionForMetaData() throws -> CameraSource {
            cameraSource.setCaptureMetadataOutput()
            assert(cameraSource.getSession() != nil && cameraSource.getCaptureMetadataOutput() != nil)
            if !cameraSource.getSession()!.canAddOutput(cameraSource.getCaptureMetadataOutput()!) {
                throw AppErrorCode.cameraError("Unable to attach output to camera session")
            }
            cameraSource.getSession()!.addOutput(cameraSource.getCaptureMetadataOutput()!)
            return self.cameraSource
        }

        /*==========================================================================
            BUILDER PRIVATE FUNCTIONS
        ==========================================================================*/

        private func prepareInputDevice(camera : AVCaptureDevice) throws {
            do {
                let inputDevice = try AVCaptureDeviceInput(device: camera)
                cameraSource.setInputDevice(inputDevice: inputDevice)
            } catch let error as NSError {
                print(error.localizedDescription)
                throw AppErrorCode.cameraError("Unable to attach input to camera session")
            }
        }

        private func addInputToSession() throws {
            if (cameraSource.getSession() == nil) {
                throw AppErrorCode.cameraError("Unable to create camera session")
            }
            assert(cameraSource.getInputDevice() != nil && cameraSource.getSession()!.canAddInput(cameraSource.getInputDevice()!))
            cameraSource.getSession()!.addInput(cameraSource.getInputDevice()!)
        }
    }
}
My QR scanner code looks like this:
import UIKit
import Foundation
import AVFoundation

protocol QRScannerDelegate {
    func scannedData(_ scannedString : String)
}

class QRScanner : NSObject {
    private var cameraSource : CameraSource?
    var delegate : QRScannerDelegate?

    func prepareCamera(delegate : QRScannerDelegate) throws -> QRScanner {
        do {
            self.delegate = delegate
            self.cameraSource = try CameraSource
                .Builder()
                .createSession()
                .setSessionPreset(preset: .photo)
                .attachInputDevice(camera: CameraSource.getBackCamera())
                .addOutputToSessionForMetaData()
            self.cameraSource!.setMetadataObjects(metaObjects: [.qr], delegate: self as AVCaptureMetadataOutputObjectsDelegate)
        } catch let err as NSError {
            print(err.localizedDescription)
            self.cameraSource = nil
            throw AppErrorCode.cameraError("Unable to process camera with one or more issue")
        }
        return self
    }

    func initViewoPreviewLayer(videoGravity : AVLayerVideoGravity, orientation : AVCaptureVideoOrientation) -> QRScanner {
        assert(cameraSource != nil)
        self.cameraSource!.initViewoPreviewLayer(videoGravity: videoGravity, orientation: orientation)
        return self
    }

    func addVideoLayerToImageView(imageView : UIImageView) -> QRScanner {
        assert(cameraSource != nil)
        self.cameraSource!.addVideoLayerToImageView(imageView: imageView)
        return self
    }

    func startSession() {
        assert(cameraSource != nil)
        self.cameraSource!.startSession()
    }
}

extension QRScanner : AVCaptureMetadataOutputObjectsDelegate {
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        print("Delegate called")
        if metadataObjects.count == 0 {
            self.delegate?.scannedData("No Data")
        } else {
            let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
            if metadataObj.type == AVMetadataObject.ObjectType.qr {
                if metadataObj.stringValue != nil {
                    print("Scanner Getting data: \(metadataObj.stringValue!)")
                    self.delegate?.scannedData(metadataObj.stringValue!)
                }
            }
        }
    }
}
I have implemented QRScannerDelegate in my ViewController, but I am not getting anything there. Moreover, I am not even getting the callback inside AVCaptureMetadataOutputObjectsDelegate.
When I instead passed the ViewController instance as the AVCaptureMetadataOutputObjectsDelegate, I did get the callback with the scanned info.
So my question is: why is this happening?
1) When I pass a plain class as the AVCaptureMetadataOutputObjectsDelegate, I get no callback. But
2) when I pass a UIViewController instance as the AVCaptureMetadataOutputObjectsDelegate, I do get the callback.
UPDATE
This is how I am calling prepareCamera from my view controller:
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    do {
        try QRScanner().prepareCamera(delegate: self)
            .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
            .addVideoLayerToImageView(imageView: self.qrScannerImageView)
            .startSession()
    } catch {
        print("Some Camera Error")
    }
    self.createOverlay()
}
It's hard to say for sure without knowing how you called prepareCamera, as that is what triggers setMetadataObjectsDelegate, but it looks like you may not be keeping a strong reference to the QRScanner in your ViewController (i.e. storing it in an instance variable). That would explain why the callback fires when your ViewController is the AVCaptureMetadataOutputObjectsDelegate: the ViewController itself is still in memory.
It's also worth noting that if the ViewController is your QRScannerDelegate, you will want to declare the delegate as weak var delegate : QRScannerDelegate? to prevent a retain cycle.
EDIT:
Change
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    do {
        try QRScanner().prepareCamera(delegate: self)
            .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
            .addVideoLayerToImageView(imageView: self.qrScannerImageView)
            .startSession()
    } catch {
        print("Some Camera Error")
    }
    self.createOverlay()
}
to
var qrScanner = QRScanner()

override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    do {
        try self.qrScanner.prepareCamera(delegate: self)
            .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
            .addVideoLayerToImageView(imageView: self.qrScannerImageView)
            .startSession()
    } catch {
        print("Some Camera Error")
    }
    self.createOverlay()
}
and change
protocol QRScannerDelegate {
    func scannedData(_ scannedString : String)
}
to
protocol QRScannerDelegate: class {
    func scannedData(_ scannedString : String)
}
to allow a weak delegate.
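With the protocol class-bound, the delegate property inside QRScanner can then actually be declared weak (the change mentioned above):

class QRScanner : NSObject {
    private var cameraSource : CameraSource?
    // weak now compiles because QRScannerDelegate is class-bound
    weak var delegate : QRScannerDelegate?
    // ... rest of the class unchanged
}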
AVCaptureMetadataOutputObjectsDelegate is tough, but you can do some really cool stuff with it, so keep at it.
I pulled some QRScanner code I wrote a while ago and put it into a gist for you if you want to check it out. It's a bit more stripped down than what you have, but you may find it helpful:
https://gist.github.com/aChase55/733ea89af1bfa80c65971d3bc691f0b2

How to reconnect AKPlayer and AKMixer after AudioKit.stop()

After calling AudioKit.stop() and then subsequently calling AudioKit.start(), I'm unable to reconnect my AKMixer/AKPlayer to AudioKit. The engine appears to start successfully, but no sound is produced when I call player.play(). I'm not quite sure what I'm missing. I've boiled it down to a simple demo project, and I've included the most important sections of code below.
View controller
class ViewController: UIViewController {
    public let tickSound = MySound(url: "tick.wav")

    @IBAction func stopAudioKit(_ sender: Any) {
        try! AudioKit.stop()
        print("AudioKit stopped")
    }

    @IBAction func playSound(_ sender: Any) {
        tickSound.play()
    }
}
Sound Object
class MySound: NSObject {
    private static var mixer: AKMixer = AKMixer()
    private var player: AKPlayer?

    init(url: String) {
        super.init()
        if let file = try? AKAudioFile(readFileName: url) {
            self.player = AKPlayer(audioFile: file)
            self.player?.buffering = .always
            MySound.mixer.connect(input: self.player)
        }
    }

    func play() {
        if AudioKit.output !== MySound.mixer {
            AudioKit.output = MySound.mixer
        }
        if !AudioKit.engine.isRunning {
            do {
                try AudioKit.start()
            } catch {
                assert(false)
            }
        }
        if AudioKit.engine.isRunning {
            player?.play()
        } else {
            assert(false)
        }
    }
}
Github demo project: https://github.com/rednebmas/AudioKitDemo
Thank you!
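One thing that may be worth checking (a sketch, not a verified fix): stopping the engine can tear down the node graph, so the player may need to be re-attached to the mixer after the engine restarts, reusing the same connect(input:) call from the initializer:

func play() {
    if AudioKit.output !== MySound.mixer {
        AudioKit.output = MySound.mixer
    }
    if !AudioKit.engine.isRunning {
        do {
            try AudioKit.start()
            // Assumption: the player-to-mixer connection may not survive
            // a stop()/start() cycle, so re-attach it after restarting.
            if let player = self.player {
                MySound.mixer.connect(input: player)
            }
        } catch {
            assert(false)
        }
    }
    if AudioKit.engine.isRunning {
        player?.play()
    } else {
        assert(false)
    }
}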

How can I use a Swift protocol function like an Android interface listener?

I'm a Korean Android developer and new to Swift.
I am migrating my Android app to iOS, but I've hit a problem with interfaces and listeners: I don't know how to implement a listener to communicate between a custom view and a view controller.
I have a custom view (i.e. MyView) that has two buttons, each with its own function.
In Android (with Java), I usually declare a listener interface in MyView with two methods, like void onFunc1(String val1) and void onFunc2(String val2):
public class MyView extends RelativeLayout {
    private Button b1, b2;
    private String val1, val2;

    public interface OnButtonListener {
        void onFunc1(String val1);
        void onFunc2(String val2);
    }

    private OnButtonListener onButtonListener;

    public void setOnButtonListener(OnButtonListener onButtonListener) {
        this.onButtonListener = onButtonListener;
    }

    public MyView(Context context, AttributeSet attrs) {
        super(context, attrs);
        b1.setOnClickListener(view -> {
            if (onButtonListener != null) {
                onButtonListener.onFunc1(val1);
            }
        });
        b2.setOnClickListener(view -> {
            if (onButtonListener != null) {
                onButtonListener.onFunc2(val2);
            }
        });
    }
}
public class MyActivity extends Activity {
    MyView myView1, myView2;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        myView1.setOnButtonListener(new MyView.OnButtonListener() {
            @Override
            public void onFunc1(String val1) {
                Log.d(TAG, val1);
            }

            @Override
            public void onFunc2(String val2) {
                Log.d(TAG, val2);
            }
        });
        myView2.setOnButtonListener(new MyView.OnButtonListener() {
            @Override
            public void onFunc1(String val1) {
                // do something1
            }

            @Override
            public void onFunc2(String val2) {
                // do something2
            }
        });
    }
}
This code works exactly as I want, so I've tried to apply the same pattern in Swift, but I couldn't find a way to do it.
Below is my attempt for Swift 4:
import Foundation
import UIKit
import SnapKit

protocol OnButtonListener {
    func onButton1(_ val1: String)
    func onButton2(_ val2: String)
}

class MyView: UIView {
    var onButtonListener: OnButtonListener?
    var val1 = "abc"
    var val2 = "123"

    override init(frame: CGRect) {
        super.init(frame: frame)
        let b1 = UIButton()
        let b2 = UIButton() // I'm using SnapKit for layout
        b1.addTarget(self, action: #selector(onB1), for: .touchUpInside)
        b2.addTarget(self, action: #selector(onB2), for: .touchUpInside)
    }

    // Required by UIView when init(frame:) is overridden
    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    @objc func onB1() {
        if onButtonListener != nil {
            onButtonListener!.onButton1(val1)
        }
    }

    @objc func onB2() {
        if onButtonListener != nil {
            onButtonListener!.onButton2(val2)
        }
    }
}

class MyVC : UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        let myView1 = MyView()
        let myView2 = MyView()
        myView1.onButtonListener = {
            // ???
        }
        myView2.onButtonListener = {
            // ???
        }
    }
}
I don't know how to implement the listener in the ViewController. I tried the same approach as in Kotlin, but that didn't work either. Thank you for reading.
You have to set the delegate in your view controller and implement the protocol methods there:
class MyVC : UIViewController, OnButtonListener {
    override func viewDidLoad() {
        super.viewDidLoad()
        let myView1 = MyView()
        myView1.onButtonListener = self
        let myView2 = MyView()
        myView2.onButtonListener = self
    }

    func onButton1(_ val1: String) {
        print(val1)
    }

    func onButton2(_ val2: String) {
        print(val2)
    }
}
**Method 2:** You can use a closure (block) as well:
class MyView: UIView {
    var buttonAction : ((_ value : String) -> Void)? = nil

    //.... your code

    @objc func onB1() {
        if let action = buttonAction {
            action("abc")
        }
    }

    @objc func onB2() {
        if let action = buttonAction {
            action("xyz")
        }
    }
}
In your ViewController:
override func viewDidLoad() {
    super.viewDidLoad()
    let myView1 = MyView()
    myView1.buttonAction = { value in
        print(value)
    }
}
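Note that with a single buttonAction closure, both buttons funnel into one handler and are distinguishable only by the string they pass. If the two buttons need genuinely different handling, one variation (a sketch on the same custom view, with hypothetical property names) is to give each button its own closure:

class MyView: UIView {
    // One closure per button, so each can be handled independently
    // (button1Action/button2Action are illustrative names)
    var button1Action: ((_ value: String) -> Void)? = nil
    var button2Action: ((_ value: String) -> Void)? = nil

    @objc func onB1() {
        button1Action?("abc")
    }

    @objc func onB2() {
        button2Action?("xyz")
    }
}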
Update your view controller code as follows:
First conform your view controller to the OnButtonListener protocol, then implement the protocol methods in the view controller.
class MyVC : UIViewController, OnButtonListener {
    override func viewDidLoad() {
        super.viewDidLoad()
        let myView1 = MyView()
        // This view controller conforms to the protocol and acts as the listener
        myView1.onButtonListener = self
        let myView2 = MyView()
        myView2.onButtonListener = self
    }

    func onButton1(_ val1: String) {
        // your code
    }

    func onButton2(_ val2: String) {
        // your code
    }
}

Q: AudioKit: AKEqualizerFilter does not work properly with IBAction

Well, I am really confused: when I change the parameters and try to start()/bypass() a node from an IBAction, nothing happens. The oscillator (white noise) seems to work properly, while the EQ doesn't. The parameters do change, but the sound remains unchanged.
Here is my code:
import AudioKit

class Conductor {
    private var oscillator = AKWhiteNoise()
    public var filter = AKEqualizerFilter()
    private var gain = -12.0

    init() {
        oscillator.amplitude = 1
        oscillator.stop()
        filter = AKEqualizerFilter(oscillator)
        filter.bypass()
        AudioKit.output = filter
        do {
            try AudioKit.start()
        } catch {
            AKLog("AudioKit did not start!")
        }
    }

    func play() {
        if oscillator.isPlaying {
            oscillator.stop()
        } else {
            oscillator.play()
        }
    }

    func bypass(centerFrequency: Double, Q: Double) {
        if filter.isBypassed {
            filter.rampDuration = 0.3
            filter.centerFrequency = centerFrequency
            filter.bandwidth = centerFrequency/Q
            filter.gain = pow(10, gain/20)
            filter.start()
        } else {
            filter.bypass()
        }
        print(filter.isBypassed)
    }
}
And here is my call in the ViewController:
@IBAction func bypass(_ sender: Any) {
    conductor.bypass(centerFrequency: 125, Q: 7)
}