AVPlayer audio buffering in Swift 3 - source disconnected observer (iPhone)

I have an app that plays an AAC audio stream. Everything works fine, but when I disconnect the stream and reconnect after one second, the audio stops playing after half a minute. When I don't reconnect, I get an error after one to two minutes. To reconnect I must stop the AVPlayer and start it again. I want to reconnect the stream or show a message immediately after the player stops playing music. How can I do that? I also have another question: I converted my code to Swift 3 and have a problem with one line:
fileprivate var playerItem = AVPlayerItem?()
error: cannot invoke initializer without argument
How can I fix that? Maybe this is the problem?
My RadioPlayer class:
import Foundation
import AVFoundation
import UIKit
protocol errorMessageDelegate {
func errorMessageChanged(_ newVal: String)
}
protocol sharedInstanceDelegate {
func sharedInstanceChanged(_ newVal: Bool)
}
class RadioPlayer : NSObject {
static let sharedInstance = RadioPlayer()
var instanceDelegate:sharedInstanceDelegate? = nil
var sharedInstanceBool = false {
didSet {
if let delegate = self.instanceDelegate {
delegate.sharedInstanceChanged(self.sharedInstanceBool)
}
}
}
fileprivate var player = AVPlayer(url: URL(string: Globals.radioURL)!)
// fileprivate var playerItem = AVPlayerItem?()
fileprivate var isPlaying = false
var errorDelegate:errorMessageDelegate? = nil
var errorMessage = "" {
didSet {
if let delegate = self.errorDelegate {
delegate.errorMessageChanged(self.errorMessage)
}
}
}
override init() {
super.init()
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(url: URL(string: Globals.radioURL)!, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronously(forKeys: [statusKey], completionHandler: {
var error: NSError? = nil
DispatchQueue.main.async(execute: {
let status: AVKeyValueStatus = asset.statusOfValue(forKey: statusKey, error: &error)
if status == AVKeyValueStatus.loaded{
let playerItem = AVPlayerItem(asset: asset)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
NotificationCenter.default.addObserver(
forName: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime,
object: nil,
queue: nil,
using: { notification in
print("Status: Failed to continue")
self.errorMessage = NSLocalizedString("STREAM_INTERUPT", comment:"Stream was interrupted")
})
print("Initializing new player")
}
func resetPlayer() {
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(url: URL(string: Globals.radioURL)!, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronously(forKeys: [statusKey], completionHandler: {
var error: NSError? = nil
DispatchQueue.main.async(execute: {
let status: AVKeyValueStatus = asset.statusOfValue(forKey: statusKey, error: &error)
if status == AVKeyValueStatus.loaded{
let playerItem = AVPlayerItem(asset: asset)
// playerItem.addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.New, context: &ItemStatusContext)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
}
func bufferFull() -> Bool {
return bufferAvailableSeconds() > 45.0
}
func bufferAvailableSeconds() -> TimeInterval {
// Check if there is a player instance
if ((player.currentItem) != nil) {
// Get current AVPlayerItem
let item: AVPlayerItem = player.currentItem!
if (item.status == AVPlayerItemStatus.readyToPlay) {
let timeRangeArray: NSArray = item.loadedTimeRanges as NSArray
if timeRangeArray.count < 1 { return(CMTimeGetSeconds(kCMTimeInvalid)) }
let aTimeRange: CMTimeRange = (timeRangeArray.object(at: 0) as AnyObject).timeRangeValue
// let startTime = CMTimeGetSeconds(aTimeRange.end)
let loadedDuration = CMTimeGetSeconds(aTimeRange.duration)
return (TimeInterval)(loadedDuration);
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
func play() {
player.play()
isPlaying = true
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func pause() {
player.pause()
isPlaying = false
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func currentlyPlaying() -> Bool {
return isPlaying
}
}
I will be grateful for help ;)

For the second issue, fileprivate var playerItem = AVPlayerItem?():
declare it as an optional instead and it should work: fileprivate var playerItem: AVPlayerItem?
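In context, the declaration and the later assignment would look roughly like this (a sketch of just the relevant lines, assuming the same asset-loading flow as in your init):

fileprivate var playerItem: AVPlayerItem?   // declared as an optional, no initializer call

// Later, once the asset's "tracks" key has loaded:
let item = AVPlayerItem(asset: asset)
self.playerItem = item
self.player = AVPlayer(playerItem: item)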
For the first issue
when I disconnect stream and connect again after one second audio stop
playing after half minute. When i don't reconnect i have error after
one- two minutes. To reconnect i must stop AVPlayer and start again. I
want to reconnect stream or show message immediately after player
stops play music. How can I do that?
I don't quite get what's wrong. You pause the player by pressing the button, then press it again, and after one to two minutes it stops by itself?
I have tested the same class today and it works just fine, even after the connection to the server is lost (when the connection resumes you can press the play button and it will play).
I'll leave you my code here, give it a try
import Foundation
import AVFoundation
import UIKit
protocol errorMessageDelegate {
func errorMessageChanged(newVal: String)
}
protocol sharedInstanceDelegate {
func sharedInstanceChanged(newVal: Bool)
}
class RadioPlayer : NSObject {
static let sharedInstance = RadioPlayer()
var instanceDelegate:sharedInstanceDelegate? = nil
var sharedInstanceBool = false {
didSet {
if let delegate = self.instanceDelegate {
delegate.sharedInstanceChanged(newVal: self.sharedInstanceBool)
}
}
}
private var player = AVPlayer(url: NSURL(string: "<# YOUR STREAM HERE #>")! as URL)
private var playerItem: AVPlayerItem?
private var isPlaying = false
var errorDelegate:errorMessageDelegate? = nil
var errorMessage = "" {
didSet {
if let delegate = self.errorDelegate {
delegate.errorMessageChanged(newVal: self.errorMessage)
}
}
}
override init() {
super.init()
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(url: NSURL(string: "<# YOUR STREAM HERE #>")! as URL, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronously(forKeys: [statusKey], completionHandler: {
var error: NSError? = nil
DispatchQueue.main.async(execute: {
let status: AVKeyValueStatus = asset.statusOfValue(forKey: statusKey, error: &error)
if status == AVKeyValueStatus.loaded{
let playerItem = AVPlayerItem(asset: asset)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
NotificationCenter.default.addObserver(
forName: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime,
object: nil,
queue: nil,
using: { notification in
print("Status: Failed to continue")
self.errorMessage = "Stream was interrupted"
})
print("Initializing new player")
}
func resetPlayer() {
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(url: NSURL(string: "<# YOUR STREAM HERE #>")! as URL, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronously(forKeys: [statusKey], completionHandler: {
var error: NSError? = nil
DispatchQueue.main.async(execute: {
let status: AVKeyValueStatus = asset.statusOfValue(forKey: statusKey, error: &error)
if status == AVKeyValueStatus.loaded{
let playerItem = AVPlayerItem(asset: asset)
//playerItem.addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.New, context: &ItemStatusContext)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
}
func bufferFull() -> Bool {
return bufferAvailableSeconds() > 45.0
}
func bufferAvailableSeconds() -> TimeInterval {
// Check if there is a player instance
if ((player.currentItem) != nil) {
// Get current AVPlayerItem
let item: AVPlayerItem = player.currentItem!
if (item.status == AVPlayerItemStatus.readyToPlay) {
let timeRangeArray: NSArray = item.loadedTimeRanges as NSArray
if timeRangeArray.count < 1 { return(CMTimeGetSeconds(kCMTimeInvalid)) }
let aTimeRange: CMTimeRange = (timeRangeArray.object(at: 0) as AnyObject).timeRangeValue
//let startTime = CMTimeGetSeconds(aTimeRange.end)
let loadedDuration = CMTimeGetSeconds(aTimeRange.duration)
return (TimeInterval)(loadedDuration)
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
func play() {
player.play()
isPlaying = true
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func pause() {
player.pause()
isPlaying = false
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func currentlyPlaying() -> Bool {
return isPlaying
}
}
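To cover the original ask (reconnect or show a message as soon as the stream stops), you can also observe AVPlayerItemPlaybackStalled, which fires when the player runs out of buffered media. A minimal sketch to add inside RadioPlayer's init, next to the existing failed-to-play observer; automatically calling resetPlayer() on a stall is my suggestion, not part of the tested class:

// Fires when playback stops because no more media data is buffered.
NotificationCenter.default.addObserver(
    forName: NSNotification.Name.AVPlayerItemPlaybackStalled,
    object: nil,
    queue: nil,
    using: { notification in
        print("Status: Playback stalled")
        // Surface the error to the UI through the existing errorDelegate...
        self.errorMessage = "Stream was interrupted"
        // ...and rebuild the player item; when resetPlayer() finishes loading,
        // sharedInstanceChanged(true) fires and the UI can call play() again.
        self.resetPlayer()
    })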

Related

Modify code to play and download mp3 at the same time, not download mp3 then play it in Swift/SwiftUI

I have created this code to play/stop/rewind tracks that are downloaded to the device and stored in the cache. The only problem is that playback starts only when the track has been fully downloaded; before that, the app freezes completely. That's not a problem with a small mp3, but what if it's a 30-minute audio file? I need to modify this code so that audio starts playing immediately and the track downloads in the background. I'm asking here because there are no guides on how to do this with AVAudioPlayer. Thanks.
import Foundation
import AVFoundation
import AVFAudio
final class AudioManager: ObservableObject {
// static let shared = AudioManager()
var player: AVAudioPlayer?
@Published private(set) var isDownloading = false
@Published private(set) var isPlaying: Bool = false {
didSet {
print(isPlaying, "isPlaying")
}
}
@Published private(set) var isLooping: Bool = false
@MainActor func startPlayer(track: String) async {
guard let fileURL = URL(string: track) else { return }
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
let songName = fileURL.lastPathComponent
var soundData: Data
let tracksFolderUrl = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).last!.appendingPathComponent("tracks")
let trackUrl = tracksFolderUrl.appendingPathComponent(songName)
if FileManager.default.fileExists(atPath: trackUrl.path) {
// Load local data if it exists
print("Loading data from \(trackUrl)")
soundData = try Data(contentsOf: trackUrl)
} else {
//… otherwise load from network
isDownloading = true
print("Downloading data from \(fileURL)")
(soundData, _) = try await URLSession.shared.data(from: fileURL)
//… then save to disk
try FileManager.default.createDirectory(at: tracksFolderUrl, withIntermediateDirectories: true)
print("Saving data to \(trackUrl)")
try soundData.write(to: trackUrl)
isDownloading = false
}
self.player = try AVAudioPlayer(data: soundData)
guard let player = player else { return }
player.prepareToPlay()
player.play()
isPlaying = true
}
catch {
print(error)
}
}
func playPause() {
guard let player = player else {
print("Audio player not found")
return
}
if player.isPlaying {
player.pause()
isPlaying = false
} else {
player.play()
isPlaying = true
}
}
func stop() {
guard let player = player else {
print("Audio player not found")
return
}
if player.isPlaying {
player.stop()
isPlaying = false
}
}
func toggleLoop() {
guard let player = player else { return }
player.numberOfLoops = player.numberOfLoops == 0 ? -1 : 0
isLooping = player.numberOfLoops != 0
print("isLooping", isLooping)
}
}
Your current code will suspend while
(soundData, _) = try await URLSession.shared.data(from: fileURL)
is downloading data.
To stream files initialise the AVPlayer with the URL:
AVPlayer(url: url)
If you also want to download the data to save locally you can do that separately.
To get the duration of the track:
player.currentItem?.duration
To get the current playback position:
player.currentTime()
Apple's documentation is here.
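Putting that together, here is a minimal sketch of the streaming approach (the class and property names are illustrative, not the poster's AudioManager):

import AVFoundation
import Combine

final class StreamingAudioManager: ObservableObject {
    @Published private(set) var isPlaying = false
    private var player: AVPlayer?

    func startPlayer(track: String) {
        guard let url = URL(string: track) else { return }
        do {
            try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            print(error)
        }
        // AVPlayer buffers as data arrives, so playback can start before
        // the whole file has been downloaded.
        player = AVPlayer(url: url)
        player?.play()
        isPlaying = true
    }

    // Current playback position in seconds.
    func currentPosition() -> Double {
        guard let player = player else { return 0 }
        return CMTimeGetSeconds(player.currentTime())
    }

    // Total duration in seconds, once the item has loaded it.
    func duration() -> Double? {
        guard let duration = player?.currentItem?.duration, duration.isNumeric else { return nil }
        return CMTimeGetSeconds(duration)
    }
}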

How to cache a .mp3 from JSON in Swift?

I have a function that downloads an mp3 file from a URL, passes it to AVAudioPlayer, and then plays it in PlayerView. I want to implement a feature: once an mp3 has been downloaded, it should be cached in the app's files so that if I open it later it isn't downloaded again. I've seen tutorials on how to do this with images, but not with mp3. How can this be done?
// Audio Manager itself
import Foundation
import AVFoundation
import AVFAudio
final class AudioManager: ObservableObject {
// static let shared = AudioManager()
var player: AVAudioPlayer?
@Published private(set) var isPlaying: Bool = false {
didSet {
print(isPlaying, "isPlaying")
}
}
func startPlayer(track: String) {
guard let fileURL = URL(string: track) else { return }
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
let soundData = try Data(contentsOf: fileURL)
self.player = try AVAudioPlayer(data: soundData)
guard let player = player else { return }
player.prepareToPlay()
player.play()
isPlaying = true
}
catch {
print(error)
}
}
func playPause() {
guard let player = player else {
print("Audio player not found")
return
}
if player.isPlaying {
player.pause()
isPlaying = false
} else {
player.play()
isPlaying = true
}
}
func stop() {
guard let player = player else {
print("Audio player not found")
return
}
if player.isPlaying {
player.stop()
isPlaying = false
}
}
}
// Main thing in my PlayerView. Passes the track to the audioManager
.onAppear {
// AudioManager.shared.startPlayer(track: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3")
DispatchQueue.main.async {
audioManager.startPlayer(track: track ?? "")
}
}
A simple way to do this would just be to write the Data that you download straight to a file. The next time you try to play that track, check if a file for it exists and load that local file instead.
Here's a (fairly naive) example:
final class AudioManager: ObservableObject {
// static let shared = AudioManager()
var player: AVAudioPlayer?
@Published private(set) var isDownloading = false
@Published private(set) var isPlaying: Bool = false
// MainActor so it always runs on the main queue
@MainActor func startPlayer(track: String) async {
guard let url = URL(string: track) else { return }
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
let songName = url.lastPathComponent
var soundData: Data
let tracksFolderUrl = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).last!.appendingPathComponent("tracks")
let trackUrl = tracksFolderUrl.appendingPathComponent(songName)
if FileManager.default.fileExists(atPath: trackUrl.path) {
// Load local data if it exists
print("Loading data from \(trackUrl)")
soundData = try Data(contentsOf: trackUrl)
} else {
//… otherwise load from network
isDownloading = true
print("Downloading data from \(url)")
(soundData, _) = try await URLSession.shared.data(from: url)
//… then save to disk
try FileManager.default.createDirectory(at: tracksFolderUrl, withIntermediateDirectories: true)
print("Saving data to \(trackUrl)")
try soundData.write(to: trackUrl)
isDownloading = false
}
self.player = try AVAudioPlayer(data: soundData)
guard let player = player else { return }
player.prepareToPlay()
player.play()
isPlaying = true
}
catch {
print(error)
}
}
}
struct ContentView: View {
@StateObject var audioManager = AudioManager()
var body: some View {
ZStack {
if audioManager.isDownloading {
VStack {
Text("Downloading")
ProgressView()
}
} else {
Text("Playing")
}
}
.task {
await audioManager.startPlayer(track: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3")
}
}
}
Note that I've made the startPlayer func async so it doesn't block the main thread, and used a different method to download the data:
try await URLSession.shared.data(from: url)

AudioKit crashes when try record after playing in AKPlayer

Here's the problem I have. I have an app that loads sounds from the web and plays them afterward. At the same time, the app has a feature to record voice and play it back. I use two different view controllers for these two features, and for these tasks I've created a singleton.
When I launch the app and go directly to the Record View Controller first, everything works fine (I can record sound, play it, then go to my second view controller with my sounds and play them with no problems too).
However, if I launch the app and play the sounds FIRST (using my second view controller) and only after that go to the Record View Controller and try to record something, I get this crash: Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: '[[busArray objectAtIndexedSubscript:(NSUInteger)element] setFormat:format error:&nsErr]: returned false, error Error Domain=NSOSStatusErrorDomain Code=-10865 "(null)"'
The crash happens when I tap recordButton, on the line try recorder.record():
func startRecord() {
if AKSettings.headPhonesPlugged {
micBooster.gain = 1
}
micBooster.gain = 0
do {
try recorder.record()
} catch {
print("Can't record because: \(error)")
}
}
This crash happens if I set category .playAndRecord
try audioSession.setCategory(.playAndRecord, mode: .default, options: .mixWithOthers)
My singleton class:
class AudioKitSingleton {
var mic: AKMicrophone!
var micMixer: AKMixer!
var recorder: AKNodeRecorder!
var tape: AKAudioFile!
var player: AKPlayer!
var micBooster: AKBooster!
var mainMixer: AKMixer!
var url: URL?
var name: String?
var artist: String?
var uploader: String?
var performer: String?
var load: Bool = false
var isPlay: Bool = false
var categoryID: Int? = nil
static let shared = AudioKitSingleton()
func initMic() {
stopAudioKitEngine()
AKAudioFile.cleanTempDirectory()
AKSettings.bufferLength = .medium
AKSettings.defaultToSpeaker = true
setupRecordSession()
mic = AKMicrophone()
micMixer = AKMixer(mic)
micBooster = AKBooster(micMixer)
micBooster.gain = 0
recorder = try? AKNodeRecorder(node: micMixer)
if let file = recorder.audioFile {
player = AKPlayer(audioFile: file)
}
player.isLooping = false
mainMixer = AKMixer(player, micBooster)
AudioKit.output = mainMixer
startAudioKitEngine()
}
func deinitMic() {
stopAudioKitEngine()
mic = nil
micMixer = nil
recorder = nil
tape = nil
player = nil
micBooster = nil
mainMixer = nil
}
func startRecord() {
if AKSettings.headPhonesPlugged {
micBooster.gain = 1
}
micBooster.gain = 0
do {
try recorder.record()
} catch {
print("Can't record because: \(error)")
}
}
func stopRecord() {
micBooster.gain = 0
tape = recorder.audioFile!
player.load(audioFile: tape)
player.prepare()
if let _ = player.audioFile?.duration {
recorder.stop()
}
}
func resetRecorder() {
do {
try recorder.reset()
} catch {
print("Can't reset recorder because: \(error)")
}
}
func playerPlayRecord() {
player.play()
}
func playerStopRecord() {
player.stop()
}
func setupRecordSession() {
do {
try audioSession.setCategory(.record, mode: .default, options: .mixWithOthers)
} catch {
print(error)
}
}
func startAudioKitEngine() {
do {
try AudioKit.start()
} catch {
AKLog("AudioKit did not start because: \(error)")
}
}
func stopAudioKitEngine() {
if AudioKit.engine.isRunning {
do {
try AudioKit.stop()
} catch {
AKLog("AudioKit did not start because: \(error)")
}
}
}
func setupPlayer(url: URL) {
if AudioKit.engine.isRunning {
stopAudioKitEngine()
}
player = try AKPlayer(url: url)
AudioKit.output = player
startAudioKitEngine()
}
func setupPlayer(mixloop: AVAudioFile) {
if AudioKit.engine.isRunning {
stopAudioKitEngine()
}
player = try AKPlayer(audioFile: mixloop)
AudioKit.output = player
startAudioKitEngine()
}
func play() {
try player?.play()
}
func resume() {
try player?.resume()
}
func stop() {
player?.stop()
}
func pause() {
player?.pause()
}
func remove() {
if AudioKit.engine.isRunning {
try? AudioKit.stop()
player?.stop()
player = nil
recorder = nil
AudioKit.output = nil
url = nil
name = nil
artist = nil
uploader = nil
load = false
isPlay = false
}
}
}
This crash happens on all iPhones except the iPhone 5s on iOS 12. I need help.
I'm pretty sure this will be solved in today's AudioKit 4.5.2 release, which is uploading now.

Record And Play Voice in Separate Class (Swift3)

I have used many code samples for recording and playing voice, but most of them are not in Swift 3 and don't work in my app.
This code works, but I want to create a separate class, apart from the view controller, that does the recording and playing of voices. Also, the GitHub code mentioned is complex and I'm looking for simplified code.
Update:
After recording, when I check for the existence of the recorded file, the file doesn't exist, and an EXC_BAD_ACCESS error is raised on the AppDelegate.
What's wrong?
Any suggestions would be appreciated.
Try to record audio by writing the line
let isRec = AudioManager.shared.record(fileName: "rec")
If isRec returns true then recording is happening, otherwise it is not.
To finish recording use: let recordedURL = AudioManager.shared.finishRecording()
To play the recorded file, pass the URL returned above to the setupPlayer() function in the manager class.
Don't forget to include the extension snippets given below the main class; they are the delegate functions of AVAudioRecorder and AVAudioPlayer. A short usage sketch follows the code below.
import Foundation
import AVFoundation
class AudioManager: NSObject {
static let shared = AudioManager()
var recordingSession: AVAudioSession?
var recorder: AVAudioRecorder?
var meterTimer: Timer?
var recorderApc0: Float = 0
var recorderPeak0: Float = 0
//PLayer
var player: AVAudioPlayer?
var savedFileURL: URL?
func setup() {
recordingSession = AVAudioSession.sharedInstance()
do {
try recordingSession?.setCategory(AVAudioSessionCategoryPlayAndRecord, with: .defaultToSpeaker)
try recordingSession?.setActive(true)
recordingSession?.requestRecordPermission({ (allowed) in
if allowed {
print("Mic Authorised")
} else {
print("Mic not Authorised")
}
})
} catch {
print("Failed to set Category", error.localizedDescription)
}
}
func record(fileName: String) -> Bool {
setup()
let url = getUserPath().appendingPathComponent(fileName + ".m4a")
let audioURL = URL.init(fileURLWithPath: url.path)
let recordSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue,
AVNumberOfChannelsKey: 2,
AVSampleRateKey: 44100.0]
do {
recorder = try AVAudioRecorder.init(url: audioURL, settings: recordSettings)
recorder?.delegate = self
recorder?.isMeteringEnabled = true
recorder?.prepareToRecord()
recorder?.record()
self.meterTimer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true, block: { (timer: Timer) in
//Update Recording Meter Values so we can track voice loudness
if let recorder = self.recorder {
recorder.updateMeters()
self.recorderApc0 = recorder.averagePower(forChannel: 0)
self.recorderPeak0 = recorder.peakPower(forChannel: 0)
}
})
savedFileURL = url
print("Recording")
return true
} catch {
print("Error Handling", error.localizedDescription)
return false
}
}
func getUserPath() -> URL {
return FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
}
func finishRecording() -> String {
recorder?.stop()
self.meterTimer?.invalidate()
var fileURL: String?
if let url: URL = recorder?.url {
fileURL = String(describing: url)
}
return fileURL ?? ""
}
//Player
func setupPlayer(_ url: URL) {
do {
try player = AVAudioPlayer.init(contentsOf: url)
} catch {
print("Error1", error.localizedDescription)
}
player?.prepareToPlay()
player?.play()
player?.volume = 1.0
player?.delegate = self
}
}
//MARK:- Audio Recorder Delegate
extension AudioManager: AVAudioRecorderDelegate {
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
print("AudioManager Finish Recording")
}
func audioRecorderEncodeErrorDidOccur(_ recorder: AVAudioRecorder, error: Error?) {
print("Encoding Error", /error?.localizedDescription)
}
}
//MARK:- Audio Player Delegates
extension AudioManager: AVAudioPlayerDelegate {
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer,
successfully flag: Bool) {
player.stop()
print("Finish Playing")
}
func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer,
error: Error?) {
print(error?.localizedDescription ?? "")
}
}
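Putting the calls above together, a minimal usage sketch (the view controller, the action names, and the "rec" file name are just examples, not part of the answer's code):

import UIKit

class RecordViewController: UIViewController {
    private var recordedPath: String?

    @IBAction func recordTapped(_ sender: UIButton) {
        let isRec = AudioManager.shared.record(fileName: "rec")
        print(isRec ? "Recording started" : "Recording failed to start")
    }

    @IBAction func stopTapped(_ sender: UIButton) {
        // finishRecording() returns the recorded file's URL as a String
        recordedPath = AudioManager.shared.finishRecording()
    }

    @IBAction func playTapped(_ sender: UIButton) {
        // Recreate the URL from the returned string and hand it to the player
        guard let path = recordedPath, let url = URL(string: path) else { return }
        AudioManager.shared.setupPlayer(url)
    }
}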

AVPlayer is not working when I'm trying to use with this YTVimeoExtractor

When I try to play the video through AVPlayer, the video loads for some time (the loading symbol appears at the top of the player), then it suddenly stops and the play icon with a line through it is shown. I don't know what is wrong. I can get the video information, but I can't make the video play.
I will show what I have done. Could anyone answer my question? Help is much appreciated.
import UIKit
import AVKit
import AVFoundation
class ViewController: UIViewController {
var playerVC : AVPlayerViewController!
var playerItem : AVPlayerItem!
var player : AVPlayer!
var playerLayer: AVPlayerLayer!
@IBOutlet weak var videoURL: UITextField!
@IBOutlet weak var videoTitle: UILabel!
@IBAction func playVideo(sender: AnyObject) {
YTVimeoExtractor.sharedExtractor().fetchVideoWithVimeoURL(self.videoURL.text!, withReferer: nil, completionHandler: {(video, error) -> Void in
if video != nil {
// https://vimeo.com/165891648
self.videoTitle.text = video?.title
print("hello: \(self.videoTitle.text)")
let url = NSURL(string: self.videoURL.text!)
// let url = NSURL.init(fileURLWithPath: self.videoURL.text!)
self.playerItem = AVPlayerItem.init(URL: url!)
self.player = AVPlayer.init(playerItem: self.playerItem)
self.playerVC = AVPlayerViewController.init();
self.playerVC.player = self.player;
self.player.currentItem!.playbackLikelyToKeepUp
self.presentViewController(self.playerVC, animated: true) { () -> Void in
self.playerVC.player?.play()
}
}else {
let alert = UIAlertController(title: error!.localizedDescription, message: error!.localizedFailureReason, preferredStyle: UIAlertControllerStyle.Alert)
alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Default, handler: nil))
self.presentViewController(alert, animated: true, completion: nil)
}
})
}
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
}
override func viewDidAppear(animated: Bool) {
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
}
This is what I get on the simulator.
I used this https://github.com/lilfaf/YTVimeoExtractor one to do this sample project. You can try it out and let me know.
I had the same issue and fixed it this way (the key point is to play one of the direct stream URLs returned in video?.streamURLs rather than the vimeo.com page URL, which AVPlayer can't play):
@IBAction func btnDownload_touchUpInside(_ sender: UIButton) {
YTVimeoExtractor.shared().fetchVideo(withVimeoURL: self.videoURL.text!, withReferer: nil, completionHandler: {(video, error) -> Void in
if video != nil {
self.videoTitle.text = video?.title
if let streamUrls = video?.streamURLs
{
var streamURL: String?
var streams : [String:String] = [:]
for (key,value) in streamUrls {
streams["\(key)"] = "\(value)"
print("\(key) || \(value)")
}
if let large = streams["720"]
{
streamURL = large
}
else if let high = streams["480"]
{
streamURL = high
}
else if let medium = streams["360"]
{
streamURL = medium
}
else if let low = streams["270"]
{
streamURL = low
}
if let url = streamURL
{
Alamofire.download(url, to: { (temporaryURL, response) -> (destinationURL: URL, options: DownloadRequest.DownloadOptions) in
// Save into the Documents directory; the destination closure must return on every path.
let directoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let fileName = response.suggestedFilename ?? temporaryURL.lastPathComponent
let finalPath = directoryURL.appendingPathComponent(fileName)
self.downloadedVideoPath = finalPath.absoluteString
return (finalPath, [.removePreviousFile])
}).downloadProgress(closure: { (progress) in
print("Progress: \(progress.fractionCompleted)")
})
}
}
}
})
}
@IBAction func btnPlayOffLine_touchUpInside(_ sender: UIButton) {
YTVimeoExtractor.shared().fetchVideo(withVimeoURL: self.videoURL.text!, withReferer: nil, completionHandler: {(video, error) -> Void in
if video != nil {
let videoURL = NSURL(string: self.downloadedVideoPath!)
let player = AVPlayer(url: videoURL! as URL)
let playerViewController = AVPlayerViewController()
playerViewController.player = player
self.present(playerViewController, animated: true) {
playerViewController.player!.play()
}
}
})
}
@IBAction func btnPlayOnLine_touchUpInside(_ sender: UIButton) {
YTVimeoExtractor.shared().fetchVideo(withVimeoURL: self.videoURL.text!, withReferer: nil, completionHandler: {(video, error) -> Void in
if video != nil {
self.videoTitle.text = video?.title
if let streamUrls = video?.streamURLs
{
var streamURL: String?
var streams : [String:String] = [:]
for (key,value) in streamUrls {
streams["\(key)"] = "\(value)"
print("\(key) || \(value)")
}
if let large = streams["720"]
{
streamURL = large
}
else if let high = streams["480"]
{
streamURL = high
}
else if let medium = streams["360"]
{
streamURL = medium
}
else if let low = streams["270"]
{
streamURL = low
}
if let url = streamURL
{
let videoURL = NSURL(string: url)
let player = AVPlayer(url: videoURL! as URL)
let playerViewController = AVPlayerViewController()
playerViewController.player = player
self.present(playerViewController, animated: true) {
playerViewController.player!.play()
}
}
}
}
})
}
}