Please help me solve this error using FolioReaderKit in Swift.
I was also facing the same crash while using FolioReader in my app. I fixed it by making the changes below in my code.
In the FolioReaderAudioPlayer.swift file, update these five methods.
@objc func pause() -> MPRemoteCommandHandlerStatus {
    playing = false
    if !isTextToSpeech {
        if let player = player, player.isPlaying {
            player.pause()
        }
    } else {
        if synthesizer.isSpeaking {
            synthesizer.pauseSpeaking(at: .word)
        }
    }
    return .success
}

@objc func togglePlay() -> MPRemoteCommandHandlerStatus {
    _ = isPlaying() ? pause() : play()
    return .success
}

@objc func play() -> MPRemoteCommandHandlerStatus {
    if book.hasAudio {
        guard let currentPage = self.folioReader.readerCenter?.currentPage else { return .commandFailed }
        currentPage.webView?.js("playAudio()")
    } else {
        self.readCurrentSentence()
    }
    return .success
}

@objc func playPrevChapter() -> MPRemoteCommandHandlerStatus {
    stopPlayerTimer()
    // Wait for "currentPage" to update, then request to play audio
    self.folioReader.readerCenter?.changePageToPrevious {
        if self.isPlaying() {
            _ = self.play()
        } else {
            _ = self.pause()
        }
    }
    return .success
}

@objc func playNextChapter() -> MPRemoteCommandHandlerStatus {
    stopPlayerTimer()
    // Wait for "currentPage" to update, then request to play audio
    self.folioReader.readerCenter?.changePageToNext {
        if self.isPlaying() {
            _ = self.play()
        }
    }
    return .success
}
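For context, handlers like these are typically registered as targets on MPRemoteCommandCenter, which expects each selector to return MPRemoteCommandHandlerStatus; a target that returns Void is a common source of this kind of crash. Below is a minimal registration sketch assuming the methods above; this is not FolioReaderKit's actual registration code.
// Hedged sketch: how MPRemoteCommandCenter targets are usually wired up,
// called from inside the audio player class. Not FolioReaderKit's exact code.
import MediaPlayer

func registerRemoteCommands() {
    let center = MPRemoteCommandCenter.shared()
    center.playCommand.addTarget(self, action: #selector(play))
    center.pauseCommand.addTarget(self, action: #selector(pause))
    center.togglePlayPauseCommand.addTarget(self, action: #selector(togglePlay))
    center.nextTrackCommand.addTarget(self, action: #selector(playNextChapter))
    center.previousTrackCommand.addTarget(self, action: #selector(playPrevChapter))
}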
I have this problem: I'm trying to show ads in my app using AdMob. It all works fine (I did some debugging), but the view containing the video is not showing... I can't figure out why it's not appearing on top of my content.
Here is the class that loads and presents the ad:
import SwiftUI
import GoogleMobileAds
import UIKit
class Rewarded: NSObject, GADRewardedAdDelegate, ObservableObject {
    @Published var adLoaded: Bool = false
    var rewardedAd: GADRewardedAd = GADRewardedAd(adUnitID: "ca-app-pub-3940256099942544/1712485313")
    var rewardFunction: (() -> Void)? = nil

    override init() {
        super.init()
        LoadRewarded()
    }

    func LoadRewarded() {
        let req = GADRequest()
        self.rewardedAd.load(req) { error in
            if error != nil {
                self.adLoaded = false
                print("Ad not loaded")
            } else {
                self.adLoaded = true
                print("Ad loaded")
            }
        }
    }

    func showAd(rewardFunction: @escaping () -> Void) {
        if self.rewardedAd.isReady {
            print("Ad is ready to show")
            self.rewardFunction = rewardFunction
            if let root = UIApplication.shared.windows.first?.rootViewController {
                self.rewardedAd.present(fromRootViewController: root, delegate: self)
                print("Presenting")
            } else {
                print("Failed to open ad")
            }
        } else {
            print("Ad not ready to show")
        }
    }

    func rewardedAd(_ rewardedAd: GADRewardedAd, userDidEarn reward: GADAdReward) {
        if let rf = rewardFunction {
            rf()
        }
    }

    func rewardedAdDidDismiss(_ rewardedAd: GADRewardedAd) {
        self.rewardedAd = GADRewardedAd(adUnitID: "ca-app-pub-3940256099942544/1712485313")
        LoadRewarded()
    }
}
And here is the SwiftUI portion of the code that calls the function of the class:
Button {
    self.rewardAd.showAd(rewardFunction: {
        // GIVE REWARD HERE
    })
} label: {
    Text("Watch Ad")
        .foregroundColor(Color.white)
        .font(.custom(customFont, size: 17))
        .fontWeight(.bold)
}
Also, the console doesn't print any errors...
Does anybody know why it is not showing?
Thank you all
This works for me. I had a heck of a time getting this to work and tried following all of the examples I could find; I finally landed on this.
func showAd(rewardFunction: @escaping () -> Void) {
    if let ad = rewardedAd {
        self.rewardFunction = rewardFunction
        if let root = UIApplication.shared.windows.first?.rootViewController {
            ad.present(fromRootViewController: root, userDidEarnRewardHandler: {
                let reward = ad.adReward
                // reward the user
            })
        } else {
            print("Error presenting ad")
        }
    } else {
        print("Ad Not Ready")
    }
}
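This showAd uses the newer presentation API, where rewardedAd is an optional GADRewardedAd?. A matching loading sketch, assuming Google Mobile Ads SDK 8 or later (where the ad is loaded through a class method rather than an instance initializer), might look like the following; it is only a sketch, not the answer author's code.
// Hedged sketch: loading counterpart for the showAd above, assuming
// `var rewardedAd: GADRewardedAd?` and Google Mobile Ads SDK 8+.
func loadRewarded() {
    GADRewardedAd.load(withAdUnitID: "ca-app-pub-3940256099942544/1712485313",
                       request: GADRequest()) { ad, error in
        if let error = error {
            print("Ad not loaded: \(error.localizedDescription)")
            self.rewardedAd = nil
            return
        }
        self.rewardedAd = ad
        print("Ad loaded")
    }
}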
I have a manager class which connects to and manages the data and state of a Bluetooth device.
The manager class conforms to IWDeviceManagerDelegate and has a method which delivers the weight data: func onReceiveWeightData(_ device: IWDevice!, data: IWWeightData!).
Once I call listenToWeight() from any controller, I want to expose the data using an Observable.
How do I fire an onNext event carrying the data from onReceiveWeightData through listenToWeight's observable?
Below is the code.
class WeightMachineManager: NSObject {
    func setup() {
        IWDeviceManager.shared()?.delegate = self
        IWDeviceManager.shared()?.initMgr()
    }

    func listenToWeight() -> Observable<IWWeightData> {
        let tag = WeightMachineManager.tag
        if let connectedDevice = connectedDevice {
            IWDeviceManager.shared()?.add(connectedDevice, callback: { (device, code) in
                if code == .success {
                    print("\(tag)[SUCCESS] Device added successfully.")
                } else {
                    print("\(tag)[FAILURE] Failed to add device.")
                }
            })
        } else {
            print("\(tag)[FAILURE] Couldn't find any device to connect.")
        }
    }
}

extension WeightMachineManager: IWDeviceManagerDelegate {
    func onReceiveWeightData(_ device: IWDevice!, data: IWWeightData!) {
        // TODO: Pass this data in the onNext event of listenToWeight's observable.
    }
}
I've made a lot of assumptions below, but the result should look something like this:
class WeightMachineManager {
    var connectedDevice: IWDevice?

    func setup() {
        IWDeviceManager.shared()?.initMgr()
    }

    func listenToWeight() -> Observable<IWWeightData> {
        if let connectedDevice = connectedDevice, let deviceManager = IWDeviceManager.shared() {
            return deviceManager.rx.add(connectedDevice)
                .flatMap { deviceManager.rx.receivedWeightData() } // maybe this should be flatMapLatest or flatMapFirst. It depends on what is calling listenToWeight() and when.
        } else {
            return .error(NSError(domain: "WeightMachineManager", code: -1, userInfo: nil))
        }
    }
}

extension IWDeviceManager: HasDelegate {
    public typealias Delegate = IWDeviceManagerDelegate
}

class IWDeviceManagerDelegateProxy
    : DelegateProxy<IWDeviceManager, IWDeviceManagerDelegate>
    , DelegateProxyType
    , IWDeviceManagerDelegate {

    init(parentObject: IWDeviceManager) {
        super.init(parentObject: parentObject, delegateProxy: IWDeviceManagerDelegateProxy.self)
    }

    public static func registerKnownImplementations() {
        self.register { IWDeviceManagerDelegateProxy(parentObject: $0) }
    }
}

extension Reactive where Base: IWDeviceManager {
    var delegate: IWDeviceManagerDelegateProxy {
        return IWDeviceManagerDelegateProxy.proxy(for: base)
    }

    func add(_ device: IWDevice) -> Observable<Void> {
        return Observable.create { observer in
            self.base.add(device, callback: { device, code in
                if code == .success {
                    observer.onNext(())
                    observer.onCompleted()
                } else {
                    observer.onError(NSError(domain: "IWDeviceManager", code: -1, userInfo: nil))
                }
            })
            return Disposables.create()
        }
    }

    func receivedWeightData() -> Observable<IWWeightData> {
        return delegate.methodInvoked(#selector(IWDeviceManagerDelegate.onReceiveWeightData(_:data:)))
            .map { $0[1] as! IWWeightData }
    }
}
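A usage sketch for the above from a calling controller might look like this; it assumes RxSwift 6 (observe(on:)), and `disposeBag`, `someDevice`, and the UI update are placeholders, not part of the original code.
// Hypothetical caller, assuming RxSwift 6 and a DisposeBag owned by the controller.
let manager = WeightMachineManager()
manager.setup()
manager.connectedDevice = someDevice // assumed to be discovered/assigned elsewhere

manager.listenToWeight()
    .observe(on: MainScheduler.instance)
    .subscribe(onNext: { weightData in
        // update the UI with the latest IWWeightData reading
    }, onError: { error in
        print("Weight stream failed: \(error)")
    })
    .disposed(by: disposeBag)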
I am making a QR scanner. My code works when all of it is written in one place inside the ViewController, but when I modularised it I stopped getting the callback inside AVCaptureMetadataOutputObjectsDelegate.
import Foundation
import UIKit
import AVFoundation
class CameraSource: NSObject {
    private var session: AVCaptureSession?
    private var inputDevice: AVCaptureDeviceInput?
    private var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    private var captureMetadataOutput: AVCaptureMetadataOutput?

    func setCaptureMetadataOutput() {
        self.captureMetadataOutput = nil
        self.captureMetadataOutput = AVCaptureMetadataOutput()
    }

    func getCaptureMetadataOutput() -> AVCaptureMetadataOutput? {
        return self.captureMetadataOutput
    }

    func setInputDevice(inputDevice: AVCaptureDeviceInput?) {
        self.inputDevice = inputDevice
    }

    func getInputDevice() -> AVCaptureDeviceInput? {
        return self.inputDevice
    }

    func setSession(session: AVCaptureSession?) {
        self.session = session
    }

    func getSession() -> AVCaptureSession? {
        return self.session
    }

    func setMetadataObjects(metaObjects: [AVMetadataObject.ObjectType], delegate: AVCaptureMetadataOutputObjectsDelegate) {
        assert(self.captureMetadataOutput != nil)
        self.captureMetadataOutput!.setMetadataObjectsDelegate(delegate, queue: DispatchQueue.main)
        self.captureMetadataOutput!.metadataObjectTypes = metaObjects
    }

    func initViewoPreviewLayer(videoGravity: AVLayerVideoGravity, orientation: AVCaptureVideoOrientation) {
        assert(session != nil)
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: session!)
        videoPreviewLayer!.videoGravity = videoGravity
        videoPreviewLayer!.connection!.videoOrientation = orientation
    }

    func addVideoLayerToImageView(imageView: UIImageView) {
        assert(self.videoPreviewLayer != nil)
        imageView.layer.addSublayer(self.videoPreviewLayer!)
        self.videoPreviewLayer!.frame = imageView.bounds
    }

    func startSession() {
        assert(session != nil)
        self.session!.startRunning()
    }

    /*==========================================================================
     STATIC FUNCTIONS
     ==========================================================================*/
    static func getBackCamera() -> AVCaptureDevice {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .back)!
    }

    static func getFrontCamera() -> AVCaptureDevice {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .front)!
    }

    static func isCameraAvailable() -> Bool {
        if #available(iOS 10.0, *) {
            let count: Int = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                              mediaType: AVMediaType.video,
                                                              position: .unspecified).devices.count
            if count > 0 { return true }
        } else {
            let count = AVCaptureDevice.devices(for: AVMediaType.video).count
            if count > 0 { return true }
        }
        return false
    }

    /*==========================================================================
     CAMERA BUILDER CLASS
     ==========================================================================*/
    class Builder {
        var cameraSource: CameraSource

        init() {
            cameraSource = CameraSource()
        }

        func createSession() -> Builder {
            if cameraSource.getSession() != nil {
                cameraSource.setSession(session: nil)
            }
            cameraSource.setSession(session: AVCaptureSession())
            return self
        }

        func setSessionPreset(preset: AVCaptureSession.Preset) -> Builder {
            assert(cameraSource.getSession() != nil)
            cameraSource.getSession()!.sessionPreset = preset
            return self
        }

        func attachInputDevice(camera: AVCaptureDevice) throws -> Builder {
            try self.prepareInputDevice(camera: camera)
            try self.addInputToSession()
            assert(cameraSource.inputDevice != nil)
            return self
        }

        func addOutputToSessionForMetaData() throws -> CameraSource {
            cameraSource.setCaptureMetadataOutput()
            assert(cameraSource.getSession() != nil && cameraSource.getCaptureMetadataOutput() != nil)
            if !cameraSource.getSession()!.canAddOutput(cameraSource.getCaptureMetadataOutput()!) {
                throw AppErrorCode.cameraError("Unable to attach output to camera session")
            }
            cameraSource.getSession()!.addOutput(cameraSource.getCaptureMetadataOutput()!)
            return self.cameraSource
        }

        /*==========================================================================
         BUILDER PRIVATE FUNCTIONS
         ==========================================================================*/
        private func prepareInputDevice(camera: AVCaptureDevice) throws {
            do {
                let inputDevice = try AVCaptureDeviceInput(device: camera)
                cameraSource.setInputDevice(inputDevice: inputDevice)
            } catch let error as NSError {
                print(error.localizedDescription)
                throw AppErrorCode.cameraError("Unable to attach input to camera session")
            }
        }

        private func addInputToSession() throws {
            if cameraSource.getSession() == nil {
                throw AppErrorCode.cameraError("Unable to create camera session")
            }
            assert(cameraSource.getInputDevice() != nil && cameraSource.getSession()!.canAddInput(cameraSource.getInputDevice()!))
            cameraSource.getSession()!.addInput(cameraSource.getInputDevice()!)
        }
    }
}
My QR scanner code looks like this:
import UIKit
import Foundation
import AVFoundation
protocol QRScannerDelegate {
    func scannedData(_ scannedString: String)
}

class QRScanner: NSObject {
    private var cameraSource: CameraSource?
    var delegate: QRScannerDelegate?

    func prepareCamera(delegate: QRScannerDelegate) throws -> QRScanner {
        do {
            self.delegate = delegate
            self.cameraSource = try CameraSource
                .Builder()
                .createSession()
                .setSessionPreset(preset: .photo)
                .attachInputDevice(camera: CameraSource.getBackCamera())
                .addOutputToSessionForMetaData()
            self.cameraSource!.setMetadataObjects(metaObjects: [.qr], delegate: self as AVCaptureMetadataOutputObjectsDelegate)
        } catch let err as NSError {
            print(err.localizedDescription)
            self.cameraSource = nil
            throw AppErrorCode.cameraError("Unable to process camera with one or more issue")
        }
        return self
    }

    func initViewoPreviewLayer(videoGravity: AVLayerVideoGravity, orientation: AVCaptureVideoOrientation) -> QRScanner {
        assert(cameraSource != nil)
        self.cameraSource!.initViewoPreviewLayer(videoGravity: videoGravity, orientation: orientation)
        return self
    }

    func addVideoLayerToImageView(imageView: UIImageView) -> QRScanner {
        assert(cameraSource != nil)
        self.cameraSource!.addVideoLayerToImageView(imageView: imageView)
        return self
    }

    func startSession() {
        assert(cameraSource != nil)
        self.cameraSource!.startSession()
    }
}

extension QRScanner: AVCaptureMetadataOutputObjectsDelegate {
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        print("Delegate called")
        if metadataObjects.count == 0 {
            self.delegate?.scannedData("No Data")
        } else {
            let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
            if metadataObj.type == AVMetadataObject.ObjectType.qr {
                if metadataObj.stringValue != nil {
                    print("Scanner Getting data: \(metadataObj.stringValue!)")
                    self.delegate?.scannedData(metadataObj.stringValue!)
                }
            }
        }
    }
}
I have implemented QRScannerDelegate in my ViewController, but I am not getting anything there. Moreover, I am not getting the callback inside AVCaptureMetadataOutputObjectsDelegate either.
I tried passing the ViewController instance as the AVCaptureMetadataOutputObjectsDelegate, and then I did get the callback with the scanned info.
So my question is: why is this happening?
1) When I pass a normal class as the AVCaptureMetadataOutputObjectsDelegate, I am not getting the callback. But
2) When I pass a UIViewController instance as the AVCaptureMetadataOutputObjectsDelegate, I am able to get the callback.
UPDATE
This is how I am calling prepareCamera from my View Controller:
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    do {
        try QRScanner().prepareCamera(delegate: self)
            .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
            .addVideoLayerToImageView(imageView: self.qrScannerImageView)
            .startSession()
    } catch {
        print("Some Camera Error")
    }
    self.createOverlay()
}
It's hard to say for sure without knowing how you called prepareCamera, since that is what triggers setMetadataObjectsDelegate, but it looks like you may not be keeping a strong reference to QRScanner in your ViewController (i.e. storing it as an instance variable). That would explain why the callback is hit when your ViewController is the AVCaptureMetadataOutputObjectsDelegate, since the ViewController is still in memory.
It's also worth noting that if the ViewController is your QRScannerDelegate, you will want to define the delegate as weak var delegate: QRScannerDelegate? to prevent a retain cycle (memory leak).
EDIT:
Change
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    do {
        try QRScanner().prepareCamera(delegate: self)
            .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
            .addVideoLayerToImageView(imageView: self.qrScannerImageView)
            .startSession()
    } catch {
        print("Some Camera Error")
    }
    self.createOverlay()
}
to
var qrScanner = QRScanner()

override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    do {
        try self.qrScanner.prepareCamera(delegate: self)
            .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
            .addVideoLayerToImageView(imageView: self.qrScannerImageView)
            .startSession()
    } catch {
        print("Some Camera Error")
    }
    self.createOverlay()
}
and change
protocol QRScannerDelegate {
    func scannedData(_ scannedString: String)
}
to
protocol QRScannerDelegate: class {
    func scannedData(_ scannedString: String)
}
to allow a weak delegate.
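With the protocol class-constrained, the delegate property in QRScanner itself can then be declared weak. A small sketch of that corresponding change, assuming the QRScanner class from the question:
class QRScanner: NSObject {
    private var cameraSource: CameraSource?
    weak var delegate: QRScannerDelegate? // weak now that the protocol is class-constrained
    // ... rest of the class unchanged
}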
AVCaptureMetadataOutputObjectsDelegate is tough, but you can do some really cool stuff with it! So keep at it.
I pulled some QRScanner code I wrote a while ago and put it into a gist for you if you want to check it out. It's a bit more stripped down than what you have, but you may find it helpful.
https://gist.github.com/aChase55/733ea89af1bfa80c65971d3bc691f0b2
After calling AudioKit.stop(), and then subsequently calling AudioKit.start(), I'm unable to reconnect my AKMixer/AKPlayer to AudioKit. It appears that the engine starts successfully but no sound is produced when I call player.play(). I'm not quite sure what I'm missing. I've boiled it all down to a simple demo project and I've included the most important sections of code below.
View controller
class ViewController: UIViewController {
    public let tickSound = MySound(url: "tick.wav")

    @IBAction func stopAudioKit(_ sender: Any) {
        try! AudioKit.stop()
        print("AudioKit stopped")
    }

    @IBAction func playSound(_ sender: Any) {
        tickSound.play()
    }
}
Sound Object
class MySound: NSObject {
    private static var mixer: AKMixer = AKMixer()
    private var player: AKPlayer?

    init(url: String) {
        super.init()
        if let file = try? AKAudioFile(readFileName: url) {
            self.player = AKPlayer(audioFile: file)
            self.player?.buffering = .always
            MySound.mixer.connect(input: self.player)
        }
    }

    func play() {
        if AudioKit.output !== MySound.mixer {
            AudioKit.output = MySound.mixer
        }
        if !AudioKit.engine.isRunning {
            do {
                try AudioKit.start()
            } catch {
                assert(false)
            }
        }
        if AudioKit.engine.isRunning {
            player?.play()
        } else {
            assert(false)
        }
    }
}
Github demo project: https://github.com/rednebmas/AudioKitDemo
Thank you!
I need to connect the UI with the module.
Error: Cannot convert value of type '()' to closure result type 'Bool'
Controller Code:
private func connectLogicView() {
    screenView.handlePlayPauseButton(sender: screenView.playPauseButton) { () -> Bool in
        obj.playPause(queue: .global()) { // Error here
            if self.obj.getPlayer().timeControlStatus != .paused {
                return true
            } else if self.obj.getPlayer().timeControlStatus == .paused {
                return false
            }
        }
    }
}
}
Module code:
public func playPause(queue: DispatchQueue, completion: @escaping () -> Void) {
    if player.timeControlStatus == .playing {
        player.pause()
    } else {
        player.play()
    }
    queue.async {
        completion()
    }
}
UI code:
func handlePlayPauseButton(sender: UIButton, completion: () -> Bool) {
    if completion() == true {
        sender.setTitle("pause", for: .normal)
    } else {
        sender.setTitle("play", for: .normal)
    }
}
If you can show a better way, it will be greatly appreciated.
The playPause method's completion block doesn't return anything, but here you are doing:
obj.playPause(queue: .global()) {
    if self.obj.getPlayer().timeControlStatus != .paused {
        return true // NO NEED FOR THIS LINE
    } else if self.obj.getPlayer().timeControlStatus == .paused {
        return false // NO NEED FOR THIS LINE
    }
}
You are returning true or false; remove those returns and your code will run fine.
As @Prashant Tukadiya said, the code seems to be a little bit tricky/complex.
An easier way would be something similar to the below:
func playPause(queue: DispatchQueue, completion: @escaping (Bool) -> Void) {
    let someOtherCondition: Bool = false
    var returnAnswer: Bool = false
    if someOtherCondition {
        print("player paused")
        returnAnswer = true
    } else {
        print("player play")
    }
    queue.async {
        completion(returnAnswer)
    }
}
func connectLogicView() {
    let aSampleButton: UIButton = UIButton()
    // Use .main here because setTitle updates the UI and must run on the main queue.
    playPause(queue: .main) { (isPlaying) in
        if isPlaying {
            aSampleButton.setTitle("pause", for: .normal)
        } else {
            aSampleButton.setTitle("play", for: .normal)
        }
    }
}
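Applied back to the original module code from the question, the same pattern might look like the sketch below. It is only an illustration: it assumes player is the question's AVPlayer and mirrors the question's own `!= .paused` check to report the state after toggling.
// Sketch: the question's playPause reworked to report playback state
// through the completion, assuming `player` is an AVPlayer.
public func playPause(queue: DispatchQueue, completion: @escaping (Bool) -> Void) {
    if player.timeControlStatus == .playing {
        player.pause()
    } else {
        player.play()
    }
    let isPlaying = player.timeControlStatus != .paused
    queue.async {
        completion(isPlaying)
    }
}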