Cell frame size changing on rotate - Swift

I am resizing a cell frame on the iPhone X to embed an instance of AVPlayerViewController. When I change orientation from portrait to landscape, the frame size seems to change, and I end up with the controls (full screen + volume) overlapping the header and title.
Would you recommend a solution other than:
self.frame.insetBy
Here is a demo of how it looks:
iPhone X demo
import UIKit
import AVKit
class VGMediaPlayerCell: VGBaseCell {
let statusBarHeight: CGFloat = 20
let contentOffset: CGFloat = 50
static let vgReuseIdentifier = "VGMediaPlayerCell"
static var playerIsPlaying: Bool = false
var toggleHeaderVisibility: Bool = false
public weak var delegate: VGMediaPlayerCellDelegate?
var moviePlayerController = AVPlayerViewController()
var waitingIndicator = UIActivityIndicatorView(style: UIActivityIndicatorView.Style.whiteLarge)
var containerView = UIView()
var messageLabel = UILabel()
var needAutoPlay: Bool = false
var isLoaded: Bool = false
var asset: AVAsset?
var isReadyForDisplayObserver: NSKeyValueObservation?
var content: VGContent?
let deviceOrientation = UIDevice.current.orientation
//player settings
@objc var player: AVPlayer?
var playerViewController: AVPlayerViewController?
override init(frame: CGRect) {
super.init(frame: frame)
setupWaitingIndicator()
setupMessageLabel()
isReadyForDisplayObserver = moviePlayerController.observe(\.isReadyForDisplay) { [weak self] (_, _) in
guard let `self` = self else {
return
}
// When the first frame of the video is loaded, we dismiss the waiting indicator.
DispatchQueue.main.async {
if self.moviePlayerController.isReadyForDisplay {
self.waitingStateActive(isActive: false)
}
}
}
}
override func prepareForReuse() {
super.prepareForReuse()
self.isLoaded = false
needAutoPlay = false
moviePlayerController.player = nil
content = nil
asset = nil
player = nil
contextualLabel.font = nil
messageLabel.text = nil
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
// MARK: - View creation
func setupContainerView() {
addSubview(containerView)
containerView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
containerView.leftAnchor.constraint(equalTo: leftAnchor),
containerView.rightAnchor.constraint(equalTo: rightAnchor),
containerView.topAnchor.constraint(equalTo: topAnchor),
containerView.bottomAnchor.constraint(equalTo: bottomAnchor)
])
}
func setupMessageLabel() {
addSubview(messageLabel)
messageLabel.textAlignment = .center
messageLabel.textColor = .white
messageLabel.numberOfLines = 2
messageLabel.isHidden = true
messageLabel.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
messageLabel.leftAnchor.constraint(equalTo: leftAnchor, constant: 10),
messageLabel.rightAnchor.constraint(equalTo: rightAnchor, constant: -10),
messageLabel.heightAnchor.constraint(equalToConstant: 50),
messageLabel.centerYAnchor.constraint(equalTo: centerYAnchor)
])
}
func setupWaitingIndicator() {
addSubview(waitingIndicator)
waitingIndicator.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
waitingIndicator.centerXAnchor.constraint(equalTo: centerXAnchor),
waitingIndicator.centerYAnchor.constraint(equalTo: centerYAnchor),
waitingIndicator.widthAnchor.constraint(equalToConstant: 100),
waitingIndicator.heightAnchor.constraint(equalToConstant: 100)
])
}
// MARK: - Utils
func configurePlayer(with viewModel: VGMediaPlayerViewModel) {
//to update message label + loader
updateUI(with: viewModel)
if viewModel.error == ErrorMessage.noNetwork.rawValue {
self.stop()
}
// Create a new AVPlayer and AVPlayerLayer
guard let url = URL(string: viewModel.content?.contentURL ?? "") else { return }
self.player = AVPlayer(url: url)
// We want video controls so we need an AVPlayerViewController
playerViewController = AVPlayerViewController()
playerViewController?.player = player
playerViewController?.videoGravity = AVLayerVideoGravity.resizeAspect
insertSubview(playerViewController!.view, at: 0)
playerViewController!.view.topAnchor.constraint(equalTo: topAnchor).isActive = true
playerViewController!.view.leftAnchor.constraint(equalTo: leftAnchor).isActive = true
playerViewController!.view.bottomAnchor.constraint(equalTo: bottomAnchor).isActive = true
playerViewController!.view.rightAnchor.constraint(equalTo: rightAnchor).isActive = true
self.bringSubviewToFront(playerViewController!.view)
if #available(iOS 10.0, *) {
self.player?.automaticallyWaitsToMinimizeStalling = false
}
guard let asset = viewModel.avAsset else { return }
if !asset.isPlayable {
DispatchQueue.main.async {
self.waitingStateActive(isActive: false)
self.displayError(message: ErrorMessage.noPreview.rawValue)
}
}
DispatchQueue.main.async {
// Create a new AVAsset from the URL
let videoAsset = AVAsset(url: url)
// Now we need an AVPlayerItem to pass to the AVPlayer
let videoPlayerItem = AVPlayerItem(asset: videoAsset)
// Finally, we set this as the current AVPlayer item
self.player?.replaceCurrentItem(with: videoPlayerItem)
if self.needAutoPlay {
self.player?.play()
}
self.isLoaded = true
}
// custom insets per device orientation
// regular frame for iPhone 8 and downwards
// custom frame for iPhone X and upwards
if UIDevice.current.userInterfaceIdiom == .phone {
switch UIScreen.main.nativeBounds.height {
//iPhone 5/5S/5C, iPhone 6/6S/7/8, iPhone 6+/6S+/7+/8+
case 1136, 1334, 1920, 2208:
playerViewController?.view.frame = self.frame
//iPhone X/Xs, iPhone Xs Max, iPhone Xr
case 2436, 2688, 1792:
if UIApplication.shared.statusBarOrientation.isPortrait {
playerViewController?.view.frame = self.frame.insetBy(dx: 0.0, dy: 50.0)
} else if deviceOrientation == .landscapeLeft || deviceOrientation == .landscapeRight {
playerViewController?.view.frame = self.frame.insetBy(dx: 30.0, dy: 30.0)
}
default: break
}
} else {
//for the iPad
playerViewController?.view.frame = self.frame
}
// Add an observer on key path "rate" to monitor the player's playing status
if self.toggleHeaderVisibility == true {
if UIDevice.current.userInterfaceIdiom == .phone {
switch UIScreen.main.nativeBounds.height {
case 2436, 2688, 1792:
player?.addObserver(self, forKeyPath: "rate", options: [.old, .new], context: nil)
default: break
}
}
}
player?.addObserver(self, forKeyPath: "rate", options: [.old, .new], context: nil)
}
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
if object as AnyObject? === player {
if keyPath == "rate" {
guard let rate = player?.rate else { return }
if rate > Float(0.0) {
VGMediaPlayerCell.playerIsPlaying = true
NotificationCenter.default.post(name: .playerDidStartPlay, object: nil)
} else {
VGMediaPlayerCell.playerIsPlaying = false
NotificationCenter.default.post(name: .playerDidStop, object: nil)
}
}
}
}
func updateUI(with viewModel: VGMediaPlayerViewModel) {
messageLabel.isHidden = true
//indicating waiting state with spinner
waitingStateActive(isActive: viewModel.isLoading)
}
/**
Cancel asset loading
*/
func cancelLoading() {
asset?.cancelLoading()
}
/**
Show an error with a specific message
- parameter message: The message to display.
*/
func displayError(message: String) {
messageLabel.text = message
messageLabel.isHidden = false
containerView.isHidden = true
}
/**
Update the waiting indicator state
- parameter isActive: A Boolean value that indicates whether the waiting indicator should be active.
*/
func waitingStateActive(isActive: Bool) {
isActive ? waitingIndicator.startAnimating() : waitingIndicator.stopAnimating()
containerView.isHidden = isActive
}
}
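An alternative worth sketching (not from the original post): the code above both activates anchors on the player view and assigns its frame directly, and never sets translatesAutoresizingMaskIntoConstraints to false, so the two layout systems likely fight after a rotation. On iOS 11+ the per-device inset table can be replaced by pinning to the cell's safeAreaLayoutGuide, which tracks the notch and home indicator across orientations. A minimal sketch of a method you could add to VGMediaPlayerCell, assuming Auto Layout only:
// Sketch: pin the embedded player view to the safe area instead of
// hardcoding insets per nativeBounds.height. Call once after
// insertSubview(...) and drop the manual frame assignments entirely.
func pinPlayerToSafeArea() {
guard let playerView = playerViewController?.view else { return }
playerView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
playerView.topAnchor.constraint(equalTo: safeAreaLayoutGuide.topAnchor),
playerView.leadingAnchor.constraint(equalTo: safeAreaLayoutGuide.leadingAnchor),
playerView.trailingAnchor.constraint(equalTo: safeAreaLayoutGuide.trailingAnchor),
playerView.bottomAnchor.constraint(equalTo: safeAreaLayoutGuide.bottomAnchor)
])
}
Since safe-area insets update automatically on rotation, no orientation checks are needed, and the playback controls stay clear of the status bar and home indicator on both notch and non-notch devices.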

Related

Swift capture photos with portrait effect matte

I would like to implement a camera that captures portrait photos the way Apple's default camera does. Portrait effects matte and depth data delivery are enabled in the AVCapturePhotoSettings:
self.output.isPortraitEffectsMatteDeliveryEnabled = true
self.output.isDepthDataDeliveryEnabled = true
photoSettings.isPortraitEffectsMatteDeliveryEnabled = true
photoSettings.isDepthDataDeliveryEnabled = true
photoSettings.embedsDepthDataInPhoto = true
photoSettings.embedsPortraitEffectsMatteInPhoto = true
Printing AVCapturePhoto.portraitEffectsMatte returns Optional(L008 2080x1170 v.1.1), but the portrait effect is visible neither in my preview layer nor in the saved image.
Additional context
AVCaptureDevice uses the builtInDualWideCamera
Printing the output's portrait effects property also logs true
Full code:
class ViewController: UIViewController {
var session: AVCaptureSession?
var output = AVCapturePhotoOutput()
var previewLayer = AVCaptureVideoPreviewLayer()
var useFrontCamera = false // referenced below; declaration added for completeness
private func setUpCamera() {
let session = AVCaptureSession()
if let device = AVCaptureDevice.default(.builtInDualWideCamera, for: .video, position: (useFrontCamera ? AVCaptureDevice.Position.front : AVCaptureDevice.Position.back)){
do {
let input = try AVCaptureDeviceInput(device: device)
if session.canAddInput(input){
session.addInput(input)
}
if session.canAddOutput(output){
session.addOutput(output)
}
previewLayer.videoGravity = .resizeAspectFill
previewLayer.session = session
session.startRunning()
self.session = session
}
catch {
print(error)
}
}
}
private func getSettings() -> AVCapturePhotoSettings {
let photoSettings = AVCapturePhotoSettings()
if(self.output.isPortraitEffectsMatteDeliverySupported && self.output.isDepthDataDeliverySupported){
self.output.isPortraitEffectsMatteDeliveryEnabled = true
self.output.isDepthDataDeliveryEnabled = true
photoSettings.isPortraitEffectsMatteDeliveryEnabled = true
photoSettings.isDepthDataDeliveryEnabled = true
photoSettings.embedsDepthDataInPhoto = true
photoSettings.embedsPortraitEffectsMatteInPhoto = true
}
return photoSettings
}
private func takePhoto() {
self.output.capturePhoto(with: self.getSettings(), delegate: self)
}
}
extension ViewController: AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let data = photo.fileDataRepresentation() else { return}
let image = UIImage(data: data)
let imageView = UIImageView(image: image)
session?.stopRunning()
imageView.contentMode = .scaleAspectFill
imageView.frame = CGRect(x: 0, y: 0, width: view.frame.width/4, height: view.frame.height/4)
imageView.layer.name = "photoPreview"
view.addSubview(imageView)
UIImageWriteToSavedPhotosAlbum(image!, self, nil, nil)
}
}
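Worth noting as context (not from the original post): the portrait effects matte is delivered as auxiliary data only; AVFoundation never applies a background blur to the photo for you, so an unchanged preview and saved image are expected even though portraitEffectsMatte is non-nil. A sketch of applying the matte yourself with Core Image inside didFinishProcessingPhoto — CIBlendWithMask is a built-in filter, the function name and parameters are illustrative:
import AVFoundation
import CoreImage
// Sketch: blend the sharp photo over a blurred copy of itself, using the
// delivered matte as the mask (white areas keep the sharp foreground).
func applyPortraitEffect(to photo: AVCapturePhoto) -> CIImage? {
guard let matte = photo.portraitEffectsMatte,
let data = photo.fileDataRepresentation(),
let original = CIImage(data: data) else { return nil }
// The matte arrives at a lower resolution; scale it up to the photo size.
let matteImage = CIImage(cvPixelBuffer: matte.mattingImage)
let scaleX = original.extent.width / matteImage.extent.width
let scaleY = original.extent.height / matteImage.extent.height
let mask = matteImage.transformed(by: CGAffineTransform(scaleX: scaleX, y: scaleY))
let background = original.applyingGaussianBlur(sigma: 20).cropped(to: original.extent)
guard let blend = CIFilter(name: "CIBlendWithMask") else { return nil }
blend.setValue(original, forKey: kCIInputImageKey)
blend.setValue(background, forKey: kCIInputBackgroundImageKey)
blend.setValue(mask, forKey: kCIInputMaskImageKey)
return blend.outputImage
}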

Data not received with GameKit in Swift

I'm currently making a multiplayer game with GameKit. I want to show a waiting viewController while each player receives the array of players and their character selections.
Here is my extension:
extension LoadingViewController: GKMatchDelegate {
func sendData() {
guard let match = match else { return }
do {
guard let data = gameModel.encode() else { return }
try match.sendData(toAllPlayers: data, with: .reliable)
} catch {
print("Send data failed")
}
}
func match(_ match: GKMatch, didReceive data: Data, fromRemotePlayer player: GKPlayer) {
guard let model = GameModel.decode(data: data) else { return }
gameModel = model
}
}
My override, which waits until two players have filled the gameModel:
override func viewDidLoad() {
super.viewDidLoad()
Timer.scheduledTimer(withTimeInterval: 2, repeats: true) { timer in
self.setupPlayers()
if self.gameModel.players.count == 2 {
if let view = self.view as! SKView? {
// Load the SKScene from 'GameScene.sks'
if let scene = SKScene(fileNamed: "GameScene") as? GameScene {
scene.match = self.match
scene.gameModel = self.gameModel
scene.localPlayer = self.localPlayer
scene.size = view.bounds.size
scene.scaleMode = .resizeFill
// Present the scene
view.presentScene(scene)
timer.invalidate()
view.ignoresSiblingOrder = true
view.showsFPS = true
view.showsNodeCount = true
}
}
}
}
}
And setupPlayers, which is called each time to try to add a player and set their preferences:
private func setupPlayers() {
guard let player2Name = match?.players.first?.displayName else { return }
let player1 = Player(displayName: GKLocalPlayer.local.displayName)
let player2 = Player(displayName: player2Name)
var players = [player1,player2]
players.sort { (player1, player2) -> Bool in
player1.displayName < player2.displayName
}
if players.first?.displayName == GKLocalPlayer.local.displayName {
if gameModel.players.count == 0 {
players[0].index = .one
players[0].race = .orc
gameModel.players.append(players[0])
localPlayer = players[0]
sendData()
}
} else {
if gameModel.players.count == 1 {
players[1].index = .two
players[1].race = .human
gameModel.players.append(players[1])
localPlayer = players[1]
sendData()
}
}
}
However, the scene does not appear when I run simulations. I tried to track down the bug: when the first player goes through setupPlayers it works, and gameModel.players.count becomes 1, but the second player never receives the data, and their own gameModel stays at one player.
Does anyone know why?
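One thing worth checking (an assumption, since the match setup code isn't shown): match(_:didReceive:fromRemotePlayer:) fires only if the GKMatch actually has its delegate assigned, for example when the match is handed to LoadingViewController:
// Sketch: assign the delegate as soon as the match property is set.
// Without this, didReceive is never called on the receiving side.
var match: GKMatch? {
didSet {
match?.delegate = self
}
}
It is also worth logging inside sendData's guard: gameModel.encode() returning nil fails silently, which would look identical to "data never received" on the other device.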

How to remove a custom playerView from superView?

I have a videoPlayerView with a containerView on top of it to show an activityIndicatorView. The view gets loaded when I select an item from a collectionView inside a cell. My question is how to remove this view using the cancelButton. I tried removeFromSuperview() for both the container and the playerView, but the app crashes with this error:
AQDefaultDevice (1): skipping input stream 0 0 0x0
Here is the code:
class VideoPlayerView: UIView {
var videoUrl: String!
var uuidd: String!
let activityIndicatorView: UIActivityIndicatorView = {
let aiv = UIActivityIndicatorView(activityIndicatorStyle: .whiteLarge)
aiv.translatesAutoresizingMaskIntoConstraints = false
aiv.startAnimating()
return aiv
}()
lazy var controlsContainerView: UIView = {
let view = UIView()
view.backgroundColor = UIColor(white: 0, alpha: 1)
view.isUserInteractionEnabled = true
view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(handlezoomout)))
return view
}()
func handlezoomout(hh: UITapGestureRecognizer){
print("n3me")
}
lazy var cancelButton: UIButton = {
let cancelButton = UIButton()
cancelButton.setImage(#imageLiteral(resourceName: "cancel"), for: UIControlState())
cancelButton.addTarget(self, action: #selector(cancel), for: .touchUpInside)
return cancelButton
}()
func cancel() {
controlsContainerView.removeFromSuperview()
let video = VideoPlayerView()
video.removeFromSuperview()
}
lazy var pausePlayButton: UIButton = {
let button = UIButton(type: .system)
let image = UIImage(named: "pause")
button.setImage(image, for: UIControlState())
button.translatesAutoresizingMaskIntoConstraints = false
button.tintColor = .white
button.isHidden = true
button.addTarget(self, action: #selector(handlePause), for: .touchUpInside)
return button
}()
var isPlaying = false
func handlePause() {
if isPlaying {
player?.pause()
pausePlayButton.setImage(UIImage(named: "play"), for: UIControlState())
} else {
player?.play()
pausePlayButton.setImage(UIImage(named: "pause"), for: UIControlState())
}
isPlaying = !isPlaying
}
override init(frame: CGRect) {
super.init(frame: frame)
setUpPlayerView()
controlsContainerView.frame = frame
addSubview(controlsContainerView)
cancelButton.frame = CGRect(x: 16.0, y: 20.0, width: 30.0, height: 30.0)
controlsContainerView.addSubview(cancelButton)
controlsContainerView.addSubview(activityIndicatorView)
activityIndicatorView.centerXAnchor.constraint(equalTo: centerXAnchor).isActive = true
activityIndicatorView.centerYAnchor.constraint(equalTo: centerYAnchor).isActive = true
controlsContainerView.addSubview(pausePlayButton)
pausePlayButton.centerXAnchor.constraint(equalTo: centerXAnchor).isActive = true
pausePlayButton.centerYAnchor.constraint(equalTo: centerYAnchor).isActive = true
pausePlayButton.widthAnchor.constraint(equalToConstant: 50).isActive = true
pausePlayButton.heightAnchor.constraint(equalToConstant: 50).isActive = true
backgroundColor = UIColor.black
}
var player: AVPlayer?
fileprivate func setUpPlayerView() {
let postQuery = PFQuery(className: "posts")
postQuery.whereKey("uuid", equalTo: PostUuidGlobalVariable.postuuid.last!)
postQuery.getFirstObjectInBackground { (object, error) in
if (error == nil && object != nil) {
let videoFile = object!["video"] as! PFFile
if let url = URL (string: videoFile.url!) {
self.player = AVPlayer(url: url)
let playerLayer = AVPlayerLayer(player: self.player)
self.layer.addSublayer(playerLayer)
playerLayer.frame = self.frame
self.player?.play()
self.player?.isMuted = false
self.player?.addObserver(self, forKeyPath: "currentItem.loadedTimeRanges", options: .new, context: nil)
NotificationCenter.default.addObserver(self, selector: #selector(self.playerDidFinishPlaying(note:)),
name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: self.player?.currentItem)
}
}
}
}
func playerDidFinishPlaying(note: NSNotification) {
}
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
//this is when the player is ready and rendering frames
if keyPath == "currentItem.loadedTimeRanges" {
activityIndicatorView.stopAnimating()
pausePlayButton.isHidden = false
isPlaying = true
}
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
class VideoLauncher: NSObject {
func showVideoPlayer() {
if let keyWindow = UIApplication.shared.keyWindow {
let view = UIView(frame: keyWindow.frame)
view.isUserInteractionEnabled = true
view.backgroundColor = UIColor.white
view.frame = CGRect(x: keyWindow.frame.width - 10, y: keyWindow.frame.height - 10, width: 10, height: 10)
let height = keyWindow.frame.height
let videoPlayerFrame = CGRect(x: 0, y: 0, width: keyWindow.frame.width, height: height)
let videoPlayerView = VideoPlayerView(frame: videoPlayerFrame)
view.addSubview(videoPlayerView)
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseOut, animations: {
view.frame = keyWindow.frame
}, completion: { (completedAnimation) in
//maybe we'll do something here later...
UIApplication.shared.isStatusBarHidden = true
})
keyWindow.addSubview(view)
}
}
}
There is a chance this is related to you changing the user interface outside of the main thread.
From the UIView documentation:
"Threading Considerations: Manipulations to your application's user interface must occur on the main thread. Thus, you should always call the methods of the UIView class from code running in the main thread of your application. The only time this may not be strictly necessary is when creating the view object itself, but all other manipulations should occur on the main thread."
Also, your cancel function creates a new video player view and then tries to remove that new instance from its parent, which doesn't look correct.
Your cancel callback should probably be as follows:
func cancel() {
DispatchQueue.main.async { [unowned self] in
// to remove controls
self.controlsContainerView.removeFromSuperview()
// to remove the video player view itself (it is a UIView, so remove it directly)
self.removeFromSuperview()
}
}
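A related cleanup point (not in the original answer, and assuming the Parse callback in setUpPlayerView has already run, since removeObserver(_:forKeyPath:) raises an exception for an observer that was never registered): the view also adds a KVO observer and a notification observer, so a fuller cancel would stop playback and unregister them before tearing the view down:
func cancel() {
DispatchQueue.main.async { [unowned self] in
// Stop playback and tear down the observers added in setUpPlayerView()
self.player?.pause()
self.player?.removeObserver(self, forKeyPath: "currentItem.loadedTimeRanges")
NotificationCenter.default.removeObserver(self)
self.controlsContainerView.removeFromSuperview()
self.removeFromSuperview()
}
}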

Swift 3: How do I enable flash on custom AVFoundation camera?

I have a very basic AVFoundation camera with a captureButton that takes a photo and sends it to the secondCameraController to be displayed. My problem is that a lot of the API I'm using is deprecated in iOS 10, and I'm not sure how to fire the flash when I press the captureButton. Any help will be highly appreciated. My code is below. Thank you guys.
class CameraController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
let captureSession = AVCaptureSession()
var previewLayer: CALayer!
var captureDevice: AVCaptureDevice!
var takePhoto: Bool = false
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = .white
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
prepareCamera()
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
navigationController?.setNavigationBarHidden(true, animated: true)
}
let cameraView: UIView = {
let view = UIView()
view.backgroundColor = .red
return view
}()
func prepareCamera() {
captureSession.sessionPreset = AVCaptureSessionPresetPhoto
if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .back).devices {
captureDevice = availableDevices.first
beginSession()
}
}
func beginSession() {
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
captureSession.addInput(captureDeviceInput)
} catch {
print(error.localizedDescription)
}
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
self.previewLayer = previewLayer
self.view.layer.addSublayer(self.previewLayer)
self.previewLayer.frame = CGRect(x: 0, y: 0, width: view.frame.width, height: view.frame.height)
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
self.view.addSubview(captureButton)
let width: CGFloat = 85
captureButton.frame = CGRect(x: (previewLayer.frame.width / 2) - width / 2, y: (previewLayer.frame.height) - width - 25, width: width, height: 85)
captureSession.startRunning()
let dataOutput = AVCaptureVideoDataOutput()
dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
dataOutput.alwaysDiscardsLateVideoFrames = true
if captureSession.canAddOutput(dataOutput) {
captureSession.addOutput(dataOutput)
}
captureSession.commitConfiguration()
let queue = DispatchQueue(label: "com.cheekylabsltd.camera")
dataOutput.setSampleBufferDelegate(self, queue: queue)
}
}
func handleCapture() {
takePhoto = true
}
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
if takePhoto {
takePhoto = false
if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {
let secondController = SecondCameraController()
secondController.takenPhoto = image
DispatchQueue.main.async {
self.present(secondController, animated: true, completion: {
self.stopCaptureSession()
})
}
}
}
}
func getImageFromSampleBuffer(buffer: CMSampleBuffer) -> UIImage? {
if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
let context = CIContext()
let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))
if let image = context.createCGImage(ciImage, from: imageRect) {
return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
}
}
return nil
}
func stopCaptureSession() {
self.captureSession.stopRunning()
if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
for input in inputs {
self.captureSession.removeInput(input)
}
}
}
lazy var captureButton: UIButton = {
let button = UIButton(type: .system)
button.backgroundColor = .white
button.layer.cornerRadius = 42.5
button.clipsToBounds = true
button.alpha = 0.40
button.layer.borderWidth = 4
button.layer.borderColor = greenColor.cgColor
button.addTarget(self, action: #selector(handleCapture), for: .touchUpInside)
return button
}()
}
Try this code (Swift 3.0):
private func flashOn(device: AVCaptureDevice) {
do {
if device.hasTorch {
try device.lockForConfiguration()
device.torchMode = .on
device.flashMode = .on
device.unlockForConfiguration()
}
} catch {
// DISABLE FLASH BUTTON HERE IF ERROR
print("Device torch/flash error")
}
}
// FOR FLASH OFF CODE
private func flashOff(device: AVCaptureDevice) {
do {
if device.hasTorch {
try device.lockForConfiguration()
device.torchMode = .off
device.flashMode = .off
device.unlockForConfiguration()
}
} catch {
// DISABLE FLASH BUTTON HERE IF ERROR
print("Device torch/flash error")
}
}
// METHOD
// private let session = AVCaptureSession()
// MARK: FLASH UTILITY METHODS
func toggleFlash() {
var device: AVCaptureDevice!
if #available(iOS 10.0, *) {
let videoDeviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera, .builtInDuoCamera], mediaType: AVMediaTypeVideo, position: .unspecified)
let devices = videoDeviceDiscoverySession.devices
device = devices.first!
} else {
// Fallback on earlier versions
device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
}
if device.hasMediaType(AVMediaTypeVideo)
{
if (device.hasTorch)
{
self.session.beginConfiguration()
//self.objOverlayView.disableCenterCameraBtn();
if device.isTorchActive == false {
self.flashOn(device: device)
} else {
self.flashOff(device: device);
}
//self.objOverlayView.enableCenterCameraBtn();
self.session.commitConfiguration()
}
}
}
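A usage note (illustrative, not from the original answer): wire the toggle to a button target; flashButton is a hypothetical name here, and in Swift 3 the selector needs no @objc annotation:
// e.g. in viewDidLoad, assuming a flashButton exists in your layout:
flashButton.addTarget(self, action: #selector(toggleFlash), for: .touchUpInside)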
Swift 4
So there are two different behaviors to choose from in AVFoundation. One is a capture-device torch switch. Connect the torchSwitch action to some view, and be sure to change CameraManager.shared.backDevice to your instance of the front or back device that provides the current input.
@IBAction func torchSwitch(_ sender: Any) {
guard let device = CameraManager.shared.backDevice else { return }
guard device.isTorchAvailable else { return }
do {
try device.lockForConfiguration()
device.torchMode = device.torchMode == .on ? .off : .on
if device.torchMode == .on {
try device.setTorchModeOn(level: 0.7)
}
device.unlockForConfiguration()
} catch {
debugPrint(error)
}
}
AVFoundation has deprecated device.flashMode.
Now, to set the flash, declare a variable on your camera manager or view controller. The value here will be the default.
var flash: AVCaptureFlashMode = .off
Connect this action to some view:
@IBAction func torchSwitch(_ sender: Any) { flash = flash == .on ? .off : .on }
Then when you want to capture an image, use AVCapturePhotoOutput and prepare the photo settings. stillCameraOutput is an instance of AVCapturePhotoOutput.
let settings = AVCapturePhotoSettings()
settings.flashMode = flash
stillCameraOutput.capturePhoto(with: settings, delegate: self)
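For completeness (not part of the original answer): the delegate passed to capturePhoto(with:delegate:) must conform to AVCapturePhotoCaptureDelegate. A minimal sketch of the callback; CameraViewController is an illustrative name:
extension CameraViewController: AVCapturePhotoCaptureDelegate {
// Called once the photo (taken with the flash setting above) is processed.
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
if let error = error {
debugPrint(error)
return
}
guard let data = photo.fileDataRepresentation(),
let image = UIImage(data: data) else { return }
// Hand the image off to your UI here.
print(image.size)
}
}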
Swift 4:
The following code works fine for me:
private enum FlashPhotoMode {
case on
case off
}
@IBOutlet weak var flashPhotoModeButton: UIButton!
@IBAction func toggleFlashPhotoMode(_ flashPhotoModeButton: UIButton) {
sessionQueue.async {
self.flashPhotoMode = (self.flashPhotoMode == .on) ? .off : .on
let flashPhotoMode = self.flashPhotoMode
DispatchQueue.main.async {
if flashPhotoMode == .on {
self.flashPhotoModeButton.setBackgroundImage(UIImage(named: "flashON"), for: .normal)
print("flashON")
} else {
self.flashPhotoModeButton.setBackgroundImage(UIImage(named: "flashOFF"), for: .normal)
print("flashOFF")
}
}
}
}
@IBAction private func capturePhoto(_ photoButton: UIButton) {
................
.......................
if self.videoDeviceInput.device.isFlashAvailable {
if self.flashPhotoMode == .on {
photoSettings.flashMode = .on
print("FLASH ON ")
} else {
print("FLASH OFF ")
photoSettings.flashMode = .off
}
}
}
Thanks!

How to assign a different audio URL for each reusable cell in Swift

I have 2 (or more) reusable collectionView cells, and each one has to play a different audio file. My problem is that when audio1 finishes, audio2 starts playing in the same cell that played audio1. If I manually tap play on each cell there's no problem, but if I want all the audio files to play automatically one after the other, they all play in the same cell. How can I start the next audio in the next cell if that cell has not yet been created?
Here is how I append to the array:
func appendToArray() {
for (_, page) in self.resources.enumerate() {
for (index,resource) in page.enumerate() {
print("Passa di qui") // Qui passa
if resource.fileType() == .Audio {
S3Client.sharedInstance.downloadResource(resourceKey: resource.value, completion: { (success, file) in
// let files = String(file)
self.audioURLs.append(file)
/**
if self.audioURLs.count == self.resources.count {
// print("audioURLs \(self.audioURLs[index])")
MediaAudioPlayer.sharedInstance.queueTrack(self.audioURLs)
}
*/
})
}
}
}
}
This is the cellForItemAtIndexPath:
func collectionView(collectionView: UICollectionView, cellForItemAtIndexPath indexPath: NSIndexPath) -> UICollectionViewCell {
switch resource.fileType() { // switch added for context; other cases elided in the question
case .Audio:
let cell = collectionView.dequeueReusableCellWithReuseIdentifier(MediaAudioCell.kCellIdentifier, forIndexPath: indexPath) as! MediaAudioCell
cell.activityIndicator.startAnimating()
cell.activityIndicator.hidden = false
S3Client.sharedInstance.downloadResource(resourceKey: resource.value, completion: { (success, file) in
if success == true && file != nil {
cell.activityIndicator.stopAnimating()
cell.activityIndicator.hidden = true
cell.audioURL = file!
// Make slider indipendent from cell to another
cell.sliderAudio.tag = indexPath.row
cell.sliderAudio.addTarget(self, action: "sliderChange:", forControlEvents: .ValueChanged)
// print("ArrayURL: \(file)")
// print("CiaoCell : \(self.audioURLs.count)")
// print("Ciaoself.resources.countCell : \(self.resources.count)")
/**
if self.audioURLs.count == self.resources.count {
// print("audioURLs \(self.audioURLs[index])")
let item = self.audioURLs[indexPath.row] print("item: \(item)")
}
if self.audioURLs.count == self.resources.count {
// print("audioURLs \(self.audioURLs[index])")
// MediaAudioPlayer.sharedInstance.queueTrack(self.audioURLs)
}
*/
// Display total audio length
let asset = AVURLAsset(URL: file!, options: nil)
let audioTracks = asset.tracksWithMediaType(AVMediaTypeAudio)
if let audioTrack = audioTracks.first {
let audioDuration: CMTime = audioTrack.timeRange.duration
let seconds: Float64 = CMTimeGetSeconds(audioDuration)
cell.labelAudio.text = cell.stringFromTimeInterval(NSTimeInterval(seconds)) as String
}
}
})
return cell
}
}
This is part of the cell's class:
override func awakeFromNib() {
super.awakeFromNib()
// Automatic start after 2 seconds if Accessibility is ON
let delayTime = dispatch_time(DISPATCH_TIME_NOW, Int64(2 * Double(NSEC_PER_SEC)))
dispatch_after(delayTime, dispatch_get_main_queue()) {
if self.defaults.boolForKey("AutomaticStart") == true && self.defaults.boolForKey("goBackPressed") == false {
if let audioURL = self.audioURL {
// Set AVAudioSession for recording and playing at the same time
let session = AVAudioSession.sharedInstance()
do {
try session.setCategory(AVAudioSessionCategoryPlayback)
try session.setActive(true)
} catch _ {}
// If audio is already playing, do not move on when the next cell is created; keep playing.
if MediaAudioPlayer.sharedInstance.player?.playing == true { // If I set this to 'false' and remove the 'else', it does not start automatically.
} else {
MediaVideoPlayer.sharedInstance.stop()
MediaAudioPlayer.sharedInstance.playPauseAudio(audioURL: audioURL, delegate: self)
}
}
}
}
}
And this is the player class:
class MediaAudioPlayer: NSObject, AVAudioPlayerDelegate {
static let sharedInstance = MediaAudioPlayer()
private var delegate: MediaAudioPlayerDelegate?
var player: AVAudioPlayer?
private var lastURL: NSURL?
private var timer: NSTimer?
internal var sliderTouched: Bool = false
var tracks = Array<NSURL?>()
var currentTrackIndex = 0
override init() {
super.init()
}
// MARK: Setup
func playPauseAudio(audioURL url: NSURL, delegate: MediaAudioPlayerDelegate) {
self.delegate?.playing = true // Set default play button on last delegate
self.delegate = delegate // Save delegate
self.sliderTouched = false
// Setup as new only when this audio has not been already set up
if (self.lastURL == nil) || (url != self.lastURL) {
self.lastURL = url
self.setupAudioSession(category: AVAudioSessionCategoryPlayback)
do { // Setup Player
self.player = try AVAudioPlayer(contentsOfURL: url)
} catch _ {}
self.player?.delegate = self
self.player?.prepareToPlay()
timer = NSTimer.scheduledTimerWithTimeInterval(0.4, target: self, selector: #selector(MediaAudioPlayer.update), userInfo: nil, repeats: true)
}
// Play - Pause
if self.player?.playing == true {
self.player?.pause()
self.delegate?.playing = true
} else {
self.player?.play()
self.delegate?.playing = false
}
}
// Format a time interval as mm:ss
func stringFromTimeInterval(interval: NSTimeInterval) -> NSString {
let ti = NSInteger(interval)
let seconds = ti % 60
let minutes = (ti / 60) % 60
return NSString(format: "%0.2d:%0.2d", minutes, seconds)
}
// MARK: Audio Session
private func setupAudioSession(category category: String) {
let session = AVAudioSession.sharedInstance()
do {
try session.setCategory(category)
try session.setActive(true)
} catch _ {}
}
// MARK: Stop
func stop() {
self.player?.stop()
self.player = nil // Deinit player
self.delegate?.playing = true
self.delegate = nil // Deinit delegate
self.timer?.invalidate(); self.timer = nil
self.lastURL = nil
}
// MARK: Playing
internal func playing() -> Bool {
if player != nil {
return player?.rate == 1.0 ? true : false
}
return false
}
// MARK: Seek
func seekToPosition(position position: Float) {
if let duration = self.player?.duration {
player?.currentTime = Double(position) * duration
self.delegate?.currentTimeAudio = stringFromTimeInterval((player?.currentTime)!) as String
}
}
func update() {
if sliderTouched == false {
if let currentTime = self.player?.currentTime, duration = player?.duration {
let time = Float(currentTime) / Float(duration)
self.delegate?.sliderPosition = time
self.delegate?.currentTimeAudio = stringFromTimeInterval((player?.currentTime)!) as String
}
}
}
// MARK: Delegate
var counter = 0
func audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool) {
print("Called")
self.lastURL = nil
self.delegate?.playing = true
/**
if flag == true {
nextSong(true)
}*/
/**
if ((counter + 1) == tracks.count) {
counter = 0
self.delegate?.playing = false
nextSong(false)
} else {
self.delegate?.playing = true
nextSong(true)
}
*/
}
}
Thank you!!
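A direction worth sketching (an assumption, not from the original post, building on the tracks and currentTrackIndex properties MediaAudioPlayer already declares and on the commented-out queueTrack call in appendToArray): let the shared player own the queue and advance it in audioPlayerDidFinishPlaying, replacing the version above, so playback never depends on the next cell existing. The notification name is illustrative; the controller would observe it and scroll to the playing index. In the question's Swift 2 style:
// Sketch: fill the queue once (e.g. from appendToArray), then let the
// shared player advance itself when each track finishes.
func queueTrack(urls: [NSURL?]) {
tracks = urls
currentTrackIndex = 0
}
func audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool) {
self.lastURL = nil
self.delegate?.playing = true
currentTrackIndex += 1
if flag == false || currentTrackIndex >= tracks.count { return }
guard let url = tracks[currentTrackIndex] else { return }
// Start the next file directly; the next cell's UI catches up when created.
self.lastURL = url
self.player = try? AVAudioPlayer(contentsOfURL: url)
self.player?.delegate = self
self.player?.prepareToPlay()
self.player?.play()
// Illustrative notification so the controller can scroll to the playing cell.
NSNotificationCenter.defaultCenter().postNotificationName("MediaAudioPlayerDidAdvance", object: nil, userInfo: ["index": currentTrackIndex])
}
With this in place, the awakeFromNib auto-start logic is no longer needed; cells only reflect the player's state instead of driving it.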