After calling AudioKit.stop() and then subsequently AudioKit.start(), I'm unable to reconnect my AKMixer/AKPlayer to AudioKit. The engine appears to start successfully, but no sound is produced when I call player.play(). I'm not sure what I'm missing. I've boiled it all down to a simple demo project and included the most important sections of code below.
View controller
class ViewController: UIViewController {
    public let tickSound = MySound(url: "tick.wav")

    @IBAction func stopAudioKit(_ sender: Any) {
        try! AudioKit.stop()
        print("AudioKit stopped")
    }

    @IBAction func playSound(_ sender: Any) {
        tickSound.play()
    }
}
Sound Object
class MySound: NSObject {
    private static var mixer: AKMixer = AKMixer()
    private var player: AKPlayer?

    init(url: String) {
        super.init()
        if let file = try? AKAudioFile(readFileName: url) {
            self.player = AKPlayer(audioFile: file)
            self.player?.buffering = .always
            MySound.mixer.connect(input: self.player)
        }
    }

    func play() {
        if AudioKit.output !== MySound.mixer {
            AudioKit.output = MySound.mixer
        }
        if !AudioKit.engine.isRunning {
            do {
                try AudioKit.start()
            } catch {
                assert(false)
            }
        }
        if AudioKit.engine.isRunning {
            player?.play()
        } else {
            assert(false)
        }
    }
}
Github demo project: https://github.com/rednebmas/AudioKitDemo
Thank you!
For a long time I've been trying to get an Xcode app to communicate with my Arduino Mega, but the whole thing doesn't work as intended. I built it with ORSSerialPort.
In the Xcode project I wrote this in the Swift file ViewController.swift:
import Cocoa
import ORSSerial

class ViewController: NSViewController, ORSSerialPortDelegate {
    var serialPort = ORSSerialPort(path: "/dev/cu.usbmodem142101")

    func SendString(data: String) {
        let stringData = Data(data.utf8)
        serialPort?.send(stringData)
    }

    func openPort() {
        serialPort?.baudRate = 9600
        serialPort?.delegate = self
        serialPort?.parity = .none
        serialPort?.numberOfStopBits = 1
        serialPort?.open()
        print("serialport is open")
    }

    func closePort() {
        serialPort?.delegate = nil
        serialPort?.close()
        print("serialport is close")
    }

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override var representedObject: Any? {
        didSet {
        }
    }

    @IBAction func onButton(_ sender: Any) {
        openPort()
    }

    @IBAction func OffButton(_ sender: Any) {
        closePort()
    }

    @IBAction func SendButton(_ sender: Any) {
        SendString(data: "stringdata blablabla")
    }

    func serialPortWasOpened(_ serialPort: ORSSerialPort) {
        print("serialPort to \(serialPort) is run")
    }

    func serialPortWasRemovedFromSystem(_ serialPort: ORSSerialPort) {
        self.serialPort = nil
    }
}
And this is the code I have loaded onto the Arduino Mega:
String angel;

void setup() {
  Serial.begin(9600);
}

void loop() {
  // readString() blocks until the default 1000 ms serial timeout elapses,
  // then the received text is echoed back over the same port.
  angel = Serial.readString();
  Serial.println(angel);
  delay(350);
}
Unfortunately, it doesn't work and I don't know why.
Your question doesn't provide any detail about which part(s) don't work, but there's one definite problem.
Your Arduino program echoes everything it receives on the serial port back out on the same port. In order to see that on the computer, you'll have to implement the serialPort(_:didReceive:) method in your view controller. Something like this:
func serialPort(_ serialPort: ORSSerialPort, didReceive data: Data) {
    guard let string = String(data: data, encoding: .ascii) else { return }
    print("Received: \(string)")
}
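One more thing worth knowing: serial data arrives in arbitrary chunks, so a single Serial.println from the Arduino may be split across several didReceive callbacks. Here is a minimal line-buffering sketch (receiveBuffer is a name introduced here, not part of your code):

private var receiveBuffer = Data()

func serialPort(_ serialPort: ORSSerialPort, didReceive data: Data) {
    // Accumulate incoming bytes and emit one complete line per println from the Arduino.
    receiveBuffer.append(data)
    while let newline = receiveBuffer.firstIndex(of: UInt8(ascii: "\n")) {
        let lineData = receiveBuffer[..<newline]
        if let line = String(data: Data(lineData), encoding: .ascii) {
            print("Received line: \(line.trimmingCharacters(in: .whitespacesAndNewlines))")
        }
        receiveBuffer.removeSubrange(...newline)
    }
}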
This is the barcode scanning tutorial I used in my program, so that you have a lot more context when you read my code: Link
Here is what my program does so far: when I scan an item's barcode with my phone, a UIAlert pops up with the barcode ID displayed and a button prompting the user to open the "Results" page. This is all fine and good, but how do I pass that same scanned barcode ID into a label on the Results page? I have been stuck on this for 2 days now, even though it seems like such an easy task.
Any help is much appreciated <3
Here is my relevant code:
ProductCatalog.plist ->
Link to Image
Scanner_ViewController.swift (first View Controller) ->
import UIKit
import AVFoundation

class Scanner_ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate, ScannerDelegate
{
    private var scanner: Scanner?

    override func viewDidLoad()
    {
        super.viewDidLoad()
        self.scanner = Scanner(withDelegate: self)
        guard let scanner = self.scanner else
        {
            return
        }
        scanner.requestCaptureSessionStartRunning()
    }

    override func didReceiveMemoryWarning()
    {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // MARK: - AVFoundation delegate methods
    public func metadataOutput(_ output: AVCaptureMetadataOutput,
                               didOutput metadataObjects: [AVMetadataObject],
                               from connection: AVCaptureConnection)
    {
        guard let scanner = self.scanner else
        {
            return
        }
        scanner.metadataOutput(output,
                               didOutput: metadataObjects,
                               from: connection)
    }

    // MARK: - Scanner delegate methods
    func cameraView() -> UIView
    {
        return self.view
    }

    func delegateViewController() -> UIViewController
    {
        return self
    }

    func scanCompleted(withCode code: String)
    {
        print(code)
        showAlert_Success(withTitle: (code))
    }

    private func showAlert_Success(withTitle title: String)
    {
        let alertController = UIAlertController(title: title, message: "Product has been successfully scanned", preferredStyle: .alert)
        // Programmatically segue to the next view controller when the UIAlert pops up
        alertController.addAction(UIAlertAction(title: "Get Results", style: .default, handler: { action in
            self.performSegue(withIdentifier: "toAnalysisPage", sender: self)
        }))
        present(alertController, animated: true)
    }
}
Scanner.swift (accompanies Scanner_ViewController.swift) ->
import Foundation
import UIKit
import AVFoundation

protocol ScannerDelegate: class
{
    func cameraView() -> UIView
    func delegateViewController() -> UIViewController
    func scanCompleted(withCode code: String)
}

class Scanner: NSObject
{
    public weak var delegate: ScannerDelegate?
    private var captureSession: AVCaptureSession?

    init(withDelegate delegate: ScannerDelegate)
    {
        self.delegate = delegate
        super.init()
        self.scannerSetup()
    }

    private func scannerSetup()
    {
        guard let captureSession = self.createCaptureSession() else
        {
            return
        }
        self.captureSession = captureSession

        guard let delegate = self.delegate else
        {
            return
        }
        let cameraView = delegate.cameraView()
        let previewLayer = self.createPreviewLayer(withCaptureSession: captureSession,
                                                   view: cameraView)
        cameraView.layer.addSublayer(previewLayer)
    }

    private func createCaptureSession() -> AVCaptureSession?
    {
        do
        {
            let captureSession = AVCaptureSession()
            guard let captureDevice = AVCaptureDevice.default(for: .video) else
            {
                return nil
            }
            let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
            let metaDataOutput = AVCaptureMetadataOutput()

            // add device input
            if captureSession.canAddInput(deviceInput) && captureSession.canAddOutput(metaDataOutput)
            {
                captureSession.addInput(deviceInput)
                captureSession.addOutput(metaDataOutput)

                guard let delegate = self.delegate,
                      let viewController = delegate.delegateViewController() as? AVCaptureMetadataOutputObjectsDelegate else
                {
                    return nil
                }
                metaDataOutput.setMetadataObjectsDelegate(viewController,
                                                          queue: DispatchQueue.main)
                metaDataOutput.metadataObjectTypes = self.metaObjectTypes()
                return captureSession
            }
        }
        catch
        {
            // handle error
        }
        return nil
    }

    private func createPreviewLayer(withCaptureSession captureSession: AVCaptureSession,
                                    view: UIView) -> AVCaptureVideoPreviewLayer
    {
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        return previewLayer
    }

    private func metaObjectTypes() -> [AVMetadataObject.ObjectType]
    {
        return [.qr,
                .code128,
                .code39,
                .code39Mod43,
                .code93,
                .ean13,
                .ean8,
                .interleaved2of5,
                .itf14,
                .pdf417,
                .upce
        ]
    }

    public func metadataOutput(_ output: AVCaptureMetadataOutput,
                               didOutput metadataObjects: [AVMetadataObject],
                               from connection: AVCaptureConnection)
    {
        self.requestCaptureSessionStopRunning()

        guard let metadataObject = metadataObjects.first,
              let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject,
              let scannedValue = readableObject.stringValue,
              let delegate = self.delegate else
        {
            return
        }
        delegate.scanCompleted(withCode: scannedValue)
    }

    public func requestCaptureSessionStartRunning()
    {
        self.toggleCaptureSessionRunningState()
    }

    public func requestCaptureSessionStopRunning()
    {
        self.toggleCaptureSessionRunningState()
    }

    private func toggleCaptureSessionRunningState()
    {
        guard let captureSession = self.captureSession else
        {
            return
        }
        if !captureSession.isRunning
        {
            captureSession.startRunning()
        }
        else
        {
            captureSession.stopRunning()
        }
    }
}
Analysis_ViewController.swift (second view controller) ->
Right now, the forKey: parameter is hard-coded to item ID 8710908501708 because I have no idea how to actually pass camera-scanned IDs into the second view controller :/
import UIKit

class Analysis_ViewController: UIViewController
{
    @IBOutlet weak var productTitle: UILabel!

    func getData()
    {
        let path = Bundle.main.path(forResource: "ProductCatalog", ofType: "plist")
        let dict: NSDictionary = NSDictionary(contentsOfFile: path!)!
        if (dict.object(forKey: "8710908501708" as Any) != nil)
        {
            if let levelDict: [String: Any] = dict.object(forKey: "8710908501708" as Any) as? [String: Any]
            {
                // use a for loop to iterate through all the keys and values inside the dictionary
                for (key, value) in levelDict
                {
                    // if we find a key named "name", we can print out the value
                    if (key == "name")
                    {
                        productTitle.text = (value as! String)
                    }
                }
            }
        }
    }

    // listing the better options that are safer in comparison to the scanned product image
    override func viewDidLoad()
    {
        super.viewDidLoad()
        getData()
    }
}
Do you have a variable to hold the scanned ID in your view controllers? If not, you can add var itemID: String? to both Scanner_ViewController and Analysis_ViewController.
Then in your func where you get the scanned code, you can set it to the variable.
func scanCompleted(withCode code: String) {
    print(code)
    itemID = code // Saves the scanned code to your var
    showAlert_Success(withTitle: (code))
}
For passing data to another view controller via segue, you might want to look into this UIViewController method for segues: documentation here. This answer also might help.
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    if segue.identifier == "toAnalysisPage" {
        if let viewController = segue.destination as? Analysis_ViewController {
            viewController.itemID = itemID
        }
    }
}
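Once the segue has set itemID, Analysis_ViewController can look up the scanned code instead of the hard-coded key. Here is a sketch of how getData() might use it, assuming the itemID property suggested above:

var itemID: String?

func getData() {
    guard let itemID = itemID,
          let path = Bundle.main.path(forResource: "ProductCatalog", ofType: "plist"),
          let dict = NSDictionary(contentsOfFile: path),
          let levelDict = dict.object(forKey: itemID) as? [String: Any] else {
        return
    }
    // Look up the product name for the scanned barcode ID.
    if let name = levelDict["name"] as? String {
        productTitle.text = name
    }
}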
I'm going through Spotify's authentication process and am requesting the scopes appRemoteControl (so my app can control music) and userReadCurrentlyPlaying (for the current song). I set up the SPTConfiguration, SPTSessionManager, and SPTAppRemote, along with their required delegate methods (SPTAppRemoteDelegate, SPTSessionManagerDelegate, SPTAppRemotePlayerStateDelegate), and I initiate a session with the requested scopes whenever the user presses a button, but I can't get the following method to trigger:
func sessionManager(manager: SPTSessionManager, didInitiate session: SPTSession) {
    appRemote.connectionParameters.accessToken = session.accessToken
    appRemote.connect()
    print(session.accessToken)
}
The authentication process itself fully works: it switches into my Spotify app, returns to my application, and plays a song from configuration.playURI = "". However, the method above is never called. I followed the Spotify demo project, but it still doesn't work. Here is my full code:
class LogInViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
    }

    let spotifyClientID = Constants.clientID
    let spotifyRedirectURL = Constants.redirectURI
    let tokenSwap = "https://***********.glitch.me/api/token"
    let refresh = "https://***********.glitch.me/api/refresh_token"

    lazy var configuration: SPTConfiguration = {
        let configuration = SPTConfiguration(clientID: spotifyClientID, redirectURL: URL(string: "Lyrically://callback")!)
        return configuration
    }()

    lazy var sessionManager: SPTSessionManager = {
        let manager = SPTSessionManager(configuration: configuration, delegate: self)
        if let tokenSwapURL = URL(string: tokenSwap), let tokenRefreshURL = URL(string: refresh) {
            self.configuration.tokenSwapURL = tokenSwapURL
            self.configuration.tokenRefreshURL = tokenRefreshURL
            self.configuration.playURI = ""
        }
        return manager
    }()

    lazy var appRemote: SPTAppRemote = {
        let appRemote = SPTAppRemote(configuration: configuration, logLevel: .debug)
        appRemote.delegate = self
        return appRemote
    }()

    @IBAction func logIn(_ sender: UIButton) {
        let requestedScopes: SPTScope = [.appRemoteControl, .userReadCurrentlyPlaying]
        sessionManager.initiateSession(with: requestedScopes, options: .default)
    }
}
extension LogInViewController: SPTAppRemotePlayerStateDelegate {
    func playerStateDidChange(_ playerState: SPTAppRemotePlayerState) {
        print("state changed")
    }
}

extension LogInViewController: SPTAppRemoteDelegate {
    func appRemoteDidEstablishConnection(_ appRemote: SPTAppRemote) {
        print("connected")
        appRemote.playerAPI?.delegate = self
        appRemote.playerAPI?.subscribe(toPlayerState: { (success, error) in
            if let error = error {
                print("Error subscribing to player state: " + error.localizedDescription)
            }
        })
    }

    func appRemote(_ appRemote: SPTAppRemote, didFailConnectionAttemptWithError error: Error?) {
        print("failed")
    }

    func appRemote(_ appRemote: SPTAppRemote, didDisconnectWithError error: Error?) {
        print("disconnected")
    }
}

extension LogInViewController: SPTSessionManagerDelegate {
    func sessionManager(manager: SPTSessionManager, didInitiate session: SPTSession) {
        appRemote.connectionParameters.accessToken = session.accessToken
        appRemote.connect()
        print(session.accessToken)
    }

    func sessionManager(manager: SPTSessionManager, didFailWith error: Error) {
        print("failed", error)
    }
}
Figured it out. I had to get hold of the sessionManager from the LogInViewController by making an instance of it:
lazy var logInVC = LogInViewController()
Then I added this line of code to the openURLContexts method in the scene delegate:
func scene(_ scene: UIScene, openURLContexts URLContexts: Set<UIOpenURLContext>) {
    print("Opened url")
    guard let url = URLContexts.first?.url else {
        return
    }
    logInVC.sessionManager.application(UIApplication.shared, open: url, options: [:])
}
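If the project used the older AppDelegate-based life cycle instead of a scene delegate, the equivalent hook would be application(_:open:options:) — a sketch assuming the same logInVC instance:

func application(_ app: UIApplication, open url: URL,
                 options: [UIApplication.OpenURLOptionsKey: Any] = [:]) -> Bool {
    // Forward the redirect URL so SPTSessionManager can finish the token
    // exchange and fire sessionManager(manager:didInitiate:).
    return logInVC.sessionManager.application(app, open: url, options: options)
}

Either way, the key point is that the redirect URL has to reach the same sessionManager instance that initiated the session.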
I am making a QR scanner. My code works when all of it is written in one place inside the ViewController, but when I modularised it, I stopped getting the callback inside AVCaptureMetadataOutputObjectsDelegate.
import Foundation
import UIKit
import AVFoundation

class CameraSource: NSObject {
    private var session: AVCaptureSession?
    private var inputDevice: AVCaptureDeviceInput?
    private var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    private var captureMetadataOutput: AVCaptureMetadataOutput?

    func setCaptureMetadataOutput() {
        self.captureMetadataOutput = nil
        self.captureMetadataOutput = AVCaptureMetadataOutput()
    }

    func getCaptureMetadataOutput() -> AVCaptureMetadataOutput? {
        return self.captureMetadataOutput
    }

    func setInputDevice(inputDevice: AVCaptureDeviceInput?) {
        self.inputDevice = inputDevice
    }

    func getInputDevice() -> AVCaptureDeviceInput? {
        return self.inputDevice
    }

    func setSession(session: AVCaptureSession?) {
        self.session = session
    }

    func getSession() -> AVCaptureSession? {
        return self.session
    }

    func setMetadataObjects(metaObjects: [AVMetadataObject.ObjectType], delegate: AVCaptureMetadataOutputObjectsDelegate) {
        assert(self.captureMetadataOutput != nil)
        self.captureMetadataOutput!.setMetadataObjectsDelegate(delegate, queue: DispatchQueue.main)
        self.captureMetadataOutput!.metadataObjectTypes = metaObjects
    }

    func initViewoPreviewLayer(videoGravity: AVLayerVideoGravity, orientation: AVCaptureVideoOrientation) {
        assert(session != nil)
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: session!)
        videoPreviewLayer!.videoGravity = videoGravity
        videoPreviewLayer!.connection!.videoOrientation = orientation
    }

    func addVideoLayerToImageView(imageView: UIImageView) {
        assert(self.videoPreviewLayer != nil)
        imageView.layer.addSublayer(self.videoPreviewLayer!)
        self.videoPreviewLayer!.frame = imageView.bounds
    }

    func startSession() {
        assert(session != nil)
        self.session!.startRunning()
    }

    /*==========================================================================
        STATIC FUNCTIONS
    ==========================================================================*/
    static func getBackCamera() -> AVCaptureDevice {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .back)!
    }

    static func getFrontCamera() -> AVCaptureDevice {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .front)!
    }

    static func isCameraAvailable() -> Bool {
        if #available(iOS 10.0, *) {
            let count: Int = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                              mediaType: AVMediaType.video,
                                                              position: .unspecified).devices.count
            if count > 0 { return true }
        } else {
            let count = AVCaptureDevice.devices(for: AVMediaType.video).count
            if count > 0 { return true }
        }
        return false
    }

    /*==========================================================================
        CAMERA BUILDER CLASS
    ==========================================================================*/
    class Builder {
        var cameraSource: CameraSource

        init() {
            cameraSource = CameraSource()
        }

        func createSession() -> Builder {
            if (cameraSource.getSession() != nil) {
                cameraSource.setSession(session: nil)
            }
            cameraSource.setSession(session: AVCaptureSession())
            return self
        }

        func setSessionPreset(preset: AVCaptureSession.Preset) -> Builder {
            assert(cameraSource.getSession() != nil)
            cameraSource.getSession()!.sessionPreset = preset
            return self
        }

        func attachInputDevice(camera: AVCaptureDevice) throws -> Builder {
            try self.prepareInputDevice(camera: camera)
            try self.addInputToSession()
            assert(cameraSource.inputDevice != nil)
            return self
        }

        func addOutputToSessionForMetaData() throws -> CameraSource {
            cameraSource.setCaptureMetadataOutput()
            assert(cameraSource.getSession() != nil && cameraSource.getCaptureMetadataOutput() != nil)
            if !cameraSource.getSession()!.canAddOutput(cameraSource.getCaptureMetadataOutput()!) {
                throw AppErrorCode.cameraError("Unable to attach output to camera session")
            }
            cameraSource.getSession()!.addOutput(cameraSource.getCaptureMetadataOutput()!)
            return self.cameraSource
        }

        /*==========================================================================
            BUILDER PRIVATE FUNCTIONS
        ==========================================================================*/
        private func prepareInputDevice(camera: AVCaptureDevice) throws {
            do {
                let inputDevice = try AVCaptureDeviceInput(device: camera)
                cameraSource.setInputDevice(inputDevice: inputDevice)
            } catch let error as NSError {
                print(error.localizedDescription)
                throw AppErrorCode.cameraError("Unable to attach input to camera session")
            }
        }

        private func addInputToSession() throws {
            if (cameraSource.getSession() == nil) {
                throw AppErrorCode.cameraError("Unable to create camera session")
            }
            assert(cameraSource.getInputDevice() != nil && cameraSource.getSession()!.canAddInput(cameraSource.getInputDevice()!))
            cameraSource.getSession()!.addInput(cameraSource.getInputDevice()!)
        }
    }
}
My QR scanner code looks like this:
import UIKit
import Foundation
import AVFoundation

protocol QRScannerDelegate {
    func scannedData(_ scannedString: String)
}

class QRScanner: NSObject {
    private var cameraSource: CameraSource?
    var delegate: QRScannerDelegate?

    func prepareCamera(delegate: QRScannerDelegate) throws -> QRScanner {
        do {
            self.delegate = delegate
            self.cameraSource = try CameraSource
                .Builder()
                .createSession()
                .setSessionPreset(preset: .photo)
                .attachInputDevice(camera: CameraSource.getBackCamera())
                .addOutputToSessionForMetaData()
            self.cameraSource!.setMetadataObjects(metaObjects: [.qr], delegate: self as AVCaptureMetadataOutputObjectsDelegate)
        } catch let err as NSError {
            print(err.localizedDescription)
            self.cameraSource = nil
            throw AppErrorCode.cameraError("Unable to process camera with one or more issue")
        }
        return self
    }

    func initViewoPreviewLayer(videoGravity: AVLayerVideoGravity, orientation: AVCaptureVideoOrientation) -> QRScanner {
        assert(cameraSource != nil)
        self.cameraSource!.initViewoPreviewLayer(videoGravity: videoGravity, orientation: orientation)
        return self
    }

    func addVideoLayerToImageView(imageView: UIImageView) -> QRScanner {
        assert(cameraSource != nil)
        self.cameraSource!.addVideoLayerToImageView(imageView: imageView)
        return self
    }

    func startSession() {
        assert(cameraSource != nil)
        self.cameraSource!.startSession()
    }
}

extension QRScanner: AVCaptureMetadataOutputObjectsDelegate {
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        print("Delegate called")
        if metadataObjects.count == 0 {
            self.delegate?.scannedData("No Data")
        } else {
            let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
            if metadataObj.type == AVMetadataObject.ObjectType.qr {
                if metadataObj.stringValue != nil {
                    print("Scanner Getting data: \(metadataObj.stringValue!)")
                    self.delegate?.scannedData(metadataObj.stringValue!)
                }
            }
        }
    }
}
I have implemented QRScannerDelegate in my ViewController, but I am not getting anything there. Moreover, I am not getting the callback inside AVCaptureMetadataOutputObjectsDelegate either.
When I tried passing the ViewController instance as the AVCaptureMetadataOutputObjectsDelegate, I did get the callback with the scanned info.
So my question is: why is this happening?
1) When I pass a plain class as the AVCaptureMetadataOutputObjectsDelegate, I get no callback. But
2) When I pass a UIViewController instance as the AVCaptureMetadataOutputObjectsDelegate, I do get the callback.
UPDATE
This is how I am calling prepareCamera from my view controller:
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    do {
        try QRScanner().prepareCamera(delegate: self)
            .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
            .addVideoLayerToImageView(imageView: self.qrScannerImageView)
            .startSession()
    } catch {
        print("Some Camera Error")
    }
    self.createOverlay()
}
It's hard to say for sure without knowing how you called prepareCamera, since that is what triggers setMetadataObjectsDelegate, but it looks like you may not be keeping a strong reference to QRScanner in your ViewController (i.e., storing it as an instance variable). That would explain why the callback is hit when your ViewController is the AVCaptureMetadataOutputObjectsDelegate: the ViewController is still in memory.
It's also worth noting that if the ViewController is your QRScannerDelegate, you will want to declare the delegate as weak var delegate: QRScannerDelegate? to prevent a memory leak.
EDIT:
Change
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    do {
        try QRScanner().prepareCamera(delegate: self)
            .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
            .addVideoLayerToImageView(imageView: self.qrScannerImageView)
            .startSession()
    } catch {
        print("Some Camera Error")
    }
    self.createOverlay()
}
to
var qrScanner = QRScanner()

override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    do {
        try self.qrScanner.prepareCamera(delegate: self)
            .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
            .addVideoLayerToImageView(imageView: self.qrScannerImageView)
            .startSession()
    } catch {
        print("Some Camera Error")
    }
    self.createOverlay()
}
and change
protocol QRScannerDelegate {
    func scannedData(_ scannedString: String)
}
to
protocol QRScannerDelegate: class {
    func scannedData(_ scannedString: String)
}
to allow a weak delegate.
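With the protocol class-bound, the delegate property inside QRScanner can then be declared weak, as recommended above:

// Weak to avoid a retain cycle when the ViewController is the delegate.
weak var delegate: QRScannerDelegate?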
AVCaptureMetadataOutputObjectsDelegate is tough, but you can do some really cool stuff with it, so keep at it!
I pulled some QRScanner code I wrote a while ago and put it into a gist for you, in case you want to check it out. It's a bit more stripped down than what you have, but you may find it helpful:
https://gist.github.com/aChase55/733ea89af1bfa80c65971d3bc691f0b2
I am trying to stream music from a remote URL. I am running this on iOS 11, but it does not play the music.
ViewController
var session = AVAudioSession.sharedInstance()
var LQPlayer: AVPlayer?
let LOW_URL = URL(string: "http://someLInk.pls")! // not an original url provided at this time.

class ViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        self.avPlayerSetup()
    }

    func avPlayerSetup() {
        do {
            try session.setCategory(AVAudioSessionCategoryPlayback)
            try session.overrideOutputAudioPort(.none)
            try session.setActive(true)
        } catch {
            print("AVPlayer setup error \(error.localizedDescription)")
        }
    }

    func initPlayer() {
        LQPlayer = AVPlayer(url: LOW_URL)
        print("player allocated")
    }

    func deAllocPlayer() {
        LQPlayer = nil
        print("player deallocated")
    }

    @IBAction func playBtn(_ sender: Any) {
        initPlayer()
        LQPlayer?.play()
    }

    @IBAction func pauseBtn(_ sender: Any) {
        LQPlayer?.pause()
        deAllocPlayer()
    }
}
I set Allow Arbitrary Loads to YES in Info.plist.
The URL given in the code above is a dummy; the actual URL works fine.
Working Code with Live Video Stream
@IBOutlet weak var player_View: UIView!

var audioSession = AVAudioSession.sharedInstance()
var LQPlayer: AVPlayer?
let LOW_URL = URL(string: "http://www.streambox.fr/playlists/test_001/stream.m3u8")!

override func viewDidLoad() {
    super.viewDidLoad()
    self.avPlayerSetup()
    LQPlayer = AVPlayer.init(url: LOW_URL)
    // AVPlayerViewController requires `import AVKit`.
    let avPlayerView = AVPlayerViewController()
    avPlayerView.view.frame = self.player_View.bounds
    avPlayerView.player = LQPlayer
    self.player_View.addSubview(avPlayerView.view)
}

func avPlayerSetup() {
    do {
        try audioSession.setCategory(AVAudioSessionCategoryPlayback)
        try audioSession.overrideOutputAudioPort(AVAudioSessionPortOverride.speaker)
        try audioSession.setActive(true)
    } catch {
        print("AVPlayer setup error \(error.localizedDescription)")
    }
}

func initPlayer() {
    LQPlayer = AVPlayer(url: LOW_URL)
    print("player allocated")
}

func deAllocPlayer() {
    LQPlayer = nil
    print("player deallocated")
}

@IBAction func playBtn(_ sender: Any) {
    // initPlayer()
    LQPlayer?.play()
}

@IBAction func pauseBtn(_ sender: Any) {
    LQPlayer?.pause()
    deAllocPlayer()
}
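To see why an audio-only stream never starts (bad URL, unsupported format, an ATS block, ...), it can help to observe the player item's status. Here is a minimal diagnostic sketch against the initPlayer() above (statusObservation is a name introduced here):

private var statusObservation: NSKeyValueObservation?

func initPlayer() {
    let item = AVPlayerItem(url: LOW_URL)
    LQPlayer = AVPlayer(playerItem: item)
    // Block-based KVO (iOS 11+): log when the item becomes ready, or why it failed.
    statusObservation = item.observe(\.status, options: [.new]) { item, _ in
        switch item.status {
        case .readyToPlay:
            print("ready to play")
        case .failed:
            print("failed: \(item.error?.localizedDescription ?? "unknown error")")
        default:
            break
        }
    }
    print("player allocated")
}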