In my application, I am trying to recreate a live-preview picture cropper like the one in the popular application "Photomath".
I divided my code into three classes: the live preview view (a UIView), an overlay view (also a UIView), and the actual cropper view.
My current approach is to add the CropperView to the overlay, and then the overlay to the live preview view. So far that's working.
I have a UIPanGestureRecognizer hooked up to the CropperView so that it can eventually be moved around. However, here is where the problem resides.
I want the inside of the cropper to be clear, or at least less dark than the surrounding overlay. I've heard that you can achieve this using a mask, but I have not been successful.
Here is the code for the live preview view:
import UIKit
import AVFoundation

class NotationCameraView : UIView {
private var session : AVCaptureSession!
private var stillImageOutput : AVCapturePhotoOutput!
private var videoPreviewLayer : AVCaptureVideoPreviewLayer!
private var previewCropper : OverlayCropper!
override init(frame: CGRect) {
super.init(frame: frame)
self.commonInitializer()
}
required init?(coder: NSCoder) {
super.init(coder: coder)
self.commonInitializer()
}
private func commonInitializer() {
if AVCaptureDevice.authorizationStatus(for: .video) == .authorized {
self.setUpLivePreview()
self.setUpOverlayCropper()
print("Set up view")
} else {
AVCaptureDevice.requestAccess(for: .video) { (success) in
if success {
//The completion handler is not called on the main queue, so hop back before touching UIKit
DispatchQueue.main.async {
self.setUpLivePreview()
self.setUpOverlayCropper()
}
} else {
print("Give me your camera REEEEEEEEEEE")
}
}
}
}
/**
Sets up the live preview of the back camera
*/
private func setUpLivePreview() {
self.session = AVCaptureSession()
self.session.sessionPreset = .high
let backCamera = AVCaptureDevice.default(for: AVMediaType.video)
//MARK: Need to have an overlay view so that the user does not see the video preview frame being upscaled
do {
let input = try AVCaptureDeviceInput(device: backCamera!)
self.stillImageOutput = AVCapturePhotoOutput()
if self.session.canAddInput(input) && self.session.canAddOutput(self.stillImageOutput) {
self.session.addInput(input)
self.session.addOutput(self.stillImageOutput)
self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session)
self.videoPreviewLayer.videoGravity = .resizeAspectFill
self.videoPreviewLayer.connection?.videoOrientation = .portrait
self.layer.addSublayer(videoPreviewLayer)
//Have public methods for starting or stopping the capture session
DispatchQueue.global(qos: .userInitiated).async {
self.session.startRunning()
DispatchQueue.main.async {
self.videoPreviewLayer.frame = self.bounds
}
}
}
} catch {
print("Need to allow permission to the back camera")
}
}
/**
Sets up the overlay cropper for the live preview.
*/
private func setUpOverlayCropper() {
self.previewCropper = OverlayCropper(frame: self.bounds)
self.previewCropper.translatesAutoresizingMaskIntoConstraints = false
self.addSubview(self.previewCropper)
}
}
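Following up on the "Have public methods for starting or stopping the capture session" comment in setUpLivePreview(), here is a minimal sketch of what those two methods could look like. The names startSession() and stopSession() are illustrative; they are not part of the original code.

/// Starts the capture session off the main thread, mirroring setUpLivePreview().
func startSession() {
    DispatchQueue.global(qos: .userInitiated).async {
        if self.session?.isRunning == false {
            self.session.startRunning()
        }
    }
}

/// Stops the capture session, e.g. when the view disappears.
func stopSession() {
    DispatchQueue.global(qos: .userInitiated).async {
        if self.session?.isRunning == true {
            self.session.stopRunning()
        }
    }
}

These would live inside NotationCameraView next to setUpLivePreview().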
Here is the code for the overlay:
class OverlayCropper : UIView {
private var cropperView : CropperView!
override init(frame: CGRect) {
super.init(frame: frame)
self.commonInitializer()
}
required init?(coder: NSCoder) {
super.init(coder: coder)
self.commonInitializer()
}
private func commonInitializer() {
self.cropperView = CropperView(frame: CGRect(x: 50, y: 100, width: 100, height: 100))
self.cropperView.backgroundColor = UIColor.blue
self.cropperView.translatesAutoresizingMaskIntoConstraints = false
self.addSubview(cropperView)
self.backgroundColor = UIColor.black.withAlphaComponent(0.25)
}
}
And lastly, here is the code for the CropperView:
class CropperView : UIView {
override init(frame: CGRect) {
super.init(frame: frame)
self.commonInitializer()
}
required init?(coder: NSCoder) {
super.init(coder: coder)
self.commonInitializer()
}
private func commonInitializer() {
let panGesture = UIPanGestureRecognizer(target: self, action: #selector(self.handlePanGesture(_:)))
self.addGestureRecognizer(panGesture)
self.makeMask()
}
@objc private func handlePanGesture(_ panGesture : UIPanGestureRecognizer) {
let translation = panGesture.translation(in: self)
//print(translation.x, translation.y)
self.center = CGPoint(x: self.center.x + translation.x, y: self.center.y + translation.y)
panGesture.setTranslation(CGPoint.zero, in: self)
}
private func makeMask() {
let path = CGMutablePath()
path.addRect(CGRect(origin: .zero, size: self.frame.size))
let maskLayer = CAShapeLayer()
maskLayer.backgroundColor = UIColor.clear.cgColor
maskLayer.path = path
maskLayer.fillRule = .evenOdd
self.layer.mask = maskLayer
self.clipsToBounds = true
}
}
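For reference, here is a minimal sketch of the "punch a hole in the overlay" idea mentioned above. It rests on one assumption about where the work belongs: because CropperView is a subview of OverlayCropper, setting a mask on the overlay's layer would hide the cropper as well, so this sketch instead draws the dimming as a CAShapeLayer whose even-odd path leaves the cropper's rect unfilled. The method name updateDimming(around:) is illustrative and not part of the original code.

import UIKit

extension OverlayCropper {
    /// Dims everything except the given rect (in the overlay's coordinate space).
    func updateDimming(around cropperFrame: CGRect) {
        // Remove any dimming layer added by a previous call.
        layer.sublayers?.filter { $0.name == "dimming" }.forEach { $0.removeFromSuperlayer() }

        // One path containing the full bounds plus the cropper rect:
        // with the even-odd rule, the overlapping (inner) rect stays unfilled.
        let path = UIBezierPath(rect: bounds)
        path.append(UIBezierPath(rect: cropperFrame))

        let dimming = CAShapeLayer()
        dimming.name = "dimming"
        dimming.path = path.cgPath
        dimming.fillRule = .evenOdd
        dimming.fillColor = UIColor.black.withAlphaComponent(0.25).cgColor

        // Insert below the CropperView's layer and let this layer provide the tint
        // instead of the solid backgroundColor set in commonInitializer().
        backgroundColor = .clear
        layer.insertSublayer(dimming, at: 0)
    }
}

With something like this in place, OverlayCropper.commonInitializer() would call updateDimming(around: cropperView.frame) after adding the cropper, and handlePanGesture(_:) would call it again at the end (for example via (superview as? OverlayCropper)?.updateDimming(around: frame)) so the hole follows the cropper as it moves.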
I'm displaying a video inside a UIView. Everything works fine, and the video layer's bounds match those of the UIView in which it is embedded.
The problem is that the video is not displayed correctly inside the view's bounds; I can only see part of it (precisely the center).
So I searched and found that there's a property of AVPlayerLayer that is supposed to solve this problem: .videoGravity.
I set it to .resizeAspectFill, but it doesn't change anything.
Here is the code:
import UIKit
import AVFoundation

class PlaceHolderVideoView : UIView {
var player = AVPlayer()
var playerLayer = AVPlayerLayer()
let containerImageView = UIImageView(image: #imageLiteral(resourceName: "VideoContainerView"), contentMode: .scaleAspectFit)
override init(frame: CGRect) {
super.init(frame: frame)
setUpUI()
setUpPlayer()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
fileprivate func setUpUI(){
clipsToBounds = true
translatesAutoresizingMaskIntoConstraints = false
addSubview(containerImageView)
containerImageView.clipsToBounds = true
containerImageView.fillSuperview()
}
fileprivate func setUpPlayer(){
let urlPathString = Bundle.main.path(forResource: "dance", ofType: "mp4")
if let videoURL = urlPathString{
let url = URL(fileURLWithPath: videoURL)
player = AVPlayer(url: url)
playerLayer = AVPlayerLayer(player: player)
playerLayer.cornerRadius = 20
playerLayer.bounds = self.containerImageView.bounds
print(self.containerImageView.bounds)
playerLayer.videoGravity = .resizeAspectFill
self.layer.masksToBounds = true
self.layer.cornerRadius = 20
self.layer.addSublayer(playerLayer)
player.play()
}
}
}
To solve this problem, I needed to override the layoutSublayers(of:) method like this:
override func layoutSublayers(of layer: CALayer) {
super.layoutSublayers(of: layer)
playerLayer.frame = self.bounds
}
This method sets the bounds of the playerLayer correctly once the view has been laid out.
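An equivalent option (just a sketch of the same idea, not part of the original fix) is to do the work in layoutSubviews inside PlaceHolderVideoView, which UIKit calls whenever the view's bounds change:

override func layoutSubviews() {
    super.layoutSubviews()
    // Resize the player layer every time the view itself is laid out.
    playerLayer.frame = bounds
}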
I have an inheritance problem that I'm trying to solve. Typically, I'd just use multiple inheritance here, but Swift doesn't really do that.
Custom UIView
import UIKit
class ValidationView: UIView {
var required:Bool = false
var validRegex:String? = nil
var requiredLbl:UILabel?
private var requiredColor:UIColor = UIColor.red
private var requiredText:String = "*"
private var requiredFont:UIFont = UIFont.systemFont(ofSize: 16.0, weight: UIFont.Weight.bold)
override init(frame: CGRect) {
super.init(frame: frame)
self.setupValidationViews()
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
self.setupValidationViews()
}
private func setupValidationViews() {
self.requiredLbl = UILabel(frame: CGRect(x: self.frame.width - 30, y: 30, width: 20, height: 20))
self.styleRequiredLabel()
self.addSubview(self.requiredLbl!)
}
func styleRequiredLabel(color:UIColor?, text:String?, font:UIFont?) {
self.requiredColor = color ?? self.requiredColor
self.requiredText = text ?? self.requiredText
self.requiredFont = font ?? self.requiredFont
self.styleRequiredLabel()
}
private func styleRequiredLabel() {
self.requiredLbl?.textColor = self.requiredColor
self.requiredLbl?.text = self.requiredText
self.requiredLbl?.font = self.requiredFont
}
}
Custom UITextField
import Foundation
import UIKit
@IBDesignable open class CustomTextField: UITextField {
@IBInspectable public var borderWidth: CGFloat = 2.0 {
didSet {
layer.borderWidth = borderWidth
}
}
@IBInspectable public var borderColor: UIColor = UIColor.lightGray {
didSet {
layer.borderColor = borderColor.cgColor
}
}
@IBInspectable public var cornerRadius: CGFloat = 4.0 {
didSet {
layer.cornerRadius = cornerRadius
layer.masksToBounds = true
}
}
}
I want that CustomTextField to also be a ValidationView. I know I could write a protocol and extension and then have my CustomTextField adopt that protocol, but that doesn't allow for init overrides. I'd rather not have to change the inits on every view that implements ValidationView.
Something like this can be accomplished using @arturdev's answer. I ended up with this:
import UIKit
class ValidatableProperties {
var required:Bool
var validRegex:String?
var requiredColor:UIColor
var requiredText:String
var requiredFont:UIFont
init(required:Bool, validRegex:String?, requiredColor:UIColor, requiredText:String, requiredFont:UIFont) {
self.required = required
self.validRegex = validRegex
self.requiredText = requiredText
self.requiredColor = requiredColor
self.requiredFont = requiredFont
}
}
protocol Validatable : UIView {
var validatableProperties:ValidatableProperties! { get set }
var requiredLbl:UILabel! { get set }
func setupValidationDefaults()
func setupValidationViews(frame:CGRect)
func styleRequiredLabel(color:UIColor?, text:String?, font:UIFont?)
}
extension Validatable {
func setupValidationDefaults() {
let props = ValidatableProperties(required: false, validRegex: nil, requiredColor: UIColor.red, requiredText: "*", requiredFont: UIFont.systemFont(ofSize: 16.0, weight: .bold))
self.validatableProperties = props
}
func setupValidationViews(frame:CGRect) {
self.requiredLbl = UILabel(frame: CGRect(x: frame.width, y: 0, width: 20, height: 20))
self.styleRequiredLabel()
self.addSubview(self.requiredLbl)
}
func styleRequiredLabel(color:UIColor?, text:String?, font:UIFont?) {
self.validatableProperties.requiredColor = color ?? self.validatableProperties.requiredColor
self.validatableProperties.requiredText = text ?? self.validatableProperties.requiredText
self.validatableProperties.requiredFont = font ?? self.validatableProperties.requiredFont
self.styleRequiredLabel()
}
private func styleRequiredLabel() {
self.requiredLbl.textColor = self.validatableProperties.requiredColor
self.requiredLbl.text = self.validatableProperties.requiredText
self.requiredLbl.font = self.validatableProperties.requiredFont
}
}
open class ValidationTextField:UITextField, Validatable {
var requiredLbl: UILabel!
var validatableProperties: ValidatableProperties!
override public init(frame: CGRect) {
super.init(frame: frame)
self.setupValidationDefaults()
self.setupValidationViews(frame: frame)
}
public required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
self.setupValidationDefaults()
self.setupValidationViews(frame: self.frame)
}
}
But this requires extending every class you want to make validatable into its own custom class, overriding the inits every time and calling the setup methods. It works, but it's not ideal and, while not exactly anti-pattern inheritance, it certainly has some code smell to it.
You should make ValidationView a protocol instead of a class, and conform your custom classes to that protocol.
ValidatableView.swift
import UIKit
fileprivate var requiredColor = UIColor.red
fileprivate var requiredText = "*"
fileprivate var requiredFont = UIFont.systemFont(ofSize: 16.0, weight: UIFont.Weight.bold)
fileprivate struct AssociatedKeys {
static var lblKey = "_lblKey_"
}
protocol ValidatableView: class {
var required: Bool {get}
var validRegex: String? {get}
var requiredLbl: UILabel? {get}
}
extension ValidatableView where Self: UIView {
var required: Bool {
return false
}
var validRegex: String? {
return nil
}
var requiredLbl: UILabel? {
get {
return objc_getAssociatedObject(self, &AssociatedKeys.lblKey) as? UILabel
}
set {
objc_setAssociatedObject(self, &AssociatedKeys.lblKey, newValue, .OBJC_ASSOCIATION_RETAIN)
}
}
func setupValidation() {
self.requiredLbl = UILabel(frame: CGRect(x: self.frame.width - 30, y: 30, width: 20, height: 20))
self.requiredLbl?.autoresizingMask = .flexibleWidth
self.styleRequiredLabel()
self.addSubview(self.requiredLbl!)
}
func styleRequiredLabel(color:UIColor? = requiredColor, text:String? = requiredText, font:UIFont? = requiredFont) {
    // Apply the passed-in values, falling back to the file-private defaults.
    self.requiredLbl?.textColor = color ?? requiredColor
    self.requiredLbl?.text = text ?? requiredText
    self.requiredLbl?.font = font ?? requiredFont
}
}
CustomTextField.swift
@IBDesignable open class CustomTextField: UITextField {
@IBInspectable public var borderWidth: CGFloat = 2.0 {
didSet {
layer.borderWidth = borderWidth
}
}
@IBInspectable public var borderColor: UIColor = UIColor.lightGray {
didSet {
layer.borderColor = borderColor.cgColor
}
}
@IBInspectable public var cornerRadius: CGFloat = 4.0 {
didSet {
layer.cornerRadius = cornerRadius
layer.masksToBounds = true
}
}
public override init(frame: CGRect) {
super.init(frame: frame)
setupValidation()
}
public required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
setupValidation()
}
}
extension CustomTextField: ValidatableView { //<- Magic line :)
}
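With that conformance in place, a usage sketch (the values here are purely illustrative) could look like this from code in the same module:

let field = CustomTextField(frame: CGRect(x: 0, y: 0, width: 240, height: 40))
// setupValidation() in the initializers has already added the default "*" label.
field.styleRequiredLabel(color: UIColor.orange, text: "!", font: UIFont.systemFont(ofSize: 14))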
You can create an instance of ValidationView when you instantiate CustomTextField.
Something like this:
@IBDesignable open class CustomTextField: UITextField {
var validationView: ValidationView
override public init(frame: CGRect) {
self.validationView = ValidationView() // stored property must be set before super.init
super.init(frame: frame)
addSubview(validationView) // assumption: the validation view is meant to live inside the text field
}
public required init?(coder aDecoder: NSCoder) {
self.validationView = ValidationView()
super.init(coder: aDecoder)
addSubview(validationView)
}
}
I'm building a camera app.
I want the preview and the photo frame to be 1:1.
But how can I do that?
I've tried changing the previewView frame:
self.previewView?.frame.size = CGSize(width: 300, height: 300)
But it does not work.
import UIKit
import AVFoundation

class CameraViewController: UIViewController {
// MARK: - Properties
// MARK: Declared
var captureSession: AVCaptureSession?
var captureOutput: AVCapturePhotoOutput?
// MARK: IBOutlet
@IBOutlet weak var previewView: PreviewView!
// MARK: - Methods
// MARK: View Life Cycle
override func viewDidLoad() {
super.viewDidLoad()
self.configureInput()
self.configureOutput()
self.configurePreview()
self.runCamera()
}
// MARK: Configure
private func configureInput() {
self.captureSession = AVCaptureSession()
self.captureSession?.beginConfiguration()
self.captureSession?.sessionPreset = .hd4K3840x2160
guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else { return }
guard let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice), self.captureSession?.canAddInput(videoDeviceInput) == true else { return }
self.captureSession?.addInput(videoDeviceInput)
}
private func configureOutput() {
let photoOutput = AVCapturePhotoOutput()
self.captureOutput = photoOutput
guard self.captureSession?.canAddOutput(photoOutput) == true else { return }
self.captureSession?.sessionPreset = .photo
self.captureSession?.addOutput(photoOutput)
self.captureSession?.commitConfiguration()
}
private func configurePreview() {
self.previewView?.videoPreviewlayer.session = self.captureSession
}
private func runCamera() {
self.captureSession?.startRunning()
}
}
This is my code.
I wrote it after reading Apple's article: https://developer.apple.com/documentation/avfoundation/cameras_and_media_capture/setting_up_a_capture_session
You could use this to change the preview layer frame to make it fill your preview view:
override func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()
previewLayer.frame = cameraView.bounds
previewLayer.videoGravity = .resizeAspectFill
}
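If the goal is a strict 1:1 preview rather than just filling the existing view, one option is to constrain the preview view itself to a square and let .resizeAspectFill crop the feed. This is only a sketch, under the assumption that previewView's constraints can be set from code (for example in viewDidLoad) without conflicting with storyboard constraints:

previewView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
    previewView.widthAnchor.constraint(equalTo: view.widthAnchor),
    // 1:1 aspect ratio: height equals width.
    previewView.heightAnchor.constraint(equalTo: previewView.widthAnchor),
    previewView.centerXAnchor.constraint(equalTo: view.centerXAnchor),
    previewView.centerYAnchor.constraint(equalTo: view.centerYAnchor)
])

Note that this only squares the on-screen preview; the captured photo would still need to be cropped to a square separately.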
I have an NSCollectionView with items inside.
I have a xib for the NSCollectionViewItem, with a binding to representedObject.image.
My issue is the following. Sometimes my items should display an "empty image" placeholder until I pick an image for them through my controls. When I pick an image, I store it in the representedObject's .image property, the binding updates the view, and all is fine.
I can rotate the image views inside the collection, and everything works, except in the situation above: when I pick an image for an empty item, the view is updated and the binding shows the proper image, but if I try to rotate the view after that, it doesn't work properly. The image is only shown at the rotation at which it was added; if I rotate to a different angle, it disappears. Rotation is observed through KVO.
This is the code that I have:
import Cocoa

class CollectionViewItem: NSCollectionViewItem {
//Init
override init?(nibName nibNameOrNil: String?, bundle nibBundleOrNil: Bundle?) {
super.init(nibName: nibNameOrNil, bundle: nibBundleOrNil)
self.addObserver(self, forKeyPath: "modelRepresentation.Rotation", options: [.new, .old], context: nil)
}
required init?(coder: NSCoder) {
super.init(coder: coder)
}
//Properties
#objc dynamic var modelRepresentation : Artwork?
override var highlightState: NSCollectionViewItemHighlightState {
get {
return super.highlightState
}
set(newHighlightState) {
super.highlightState = newHighlightState
// Relay the newHighlightState to our AAPLSlideCarrierView.
guard let carrierView = self.view as? LibraryViewItemCarrierView else {return}
carrierView.highlightState = newHighlightState
}
}
override var isSelected: Bool {
get {
return super.isSelected
}
set {
super.isSelected = newValue
guard let carrierView = self.view as? LibraryViewItemCarrierView else {return}
carrierView.selected = newValue
}
}
override var representedObject: Any? {
get {
return super.representedObject as AnyObject?
}
set(newRepresentedObject) {
super.representedObject = newRepresentedObject
if let model = newRepresentedObject as? Artwork {
modelRepresentation = model
imageView?.rotate(byDegrees: CGFloat((model.Rotation)))
}
}
}
// 2
override func viewDidLoad() {
super.viewDidLoad()
view.wantsLayer = true
view.layer?.backgroundColor = NSColor(red: 15/255, green: 34/255, blue: 42/255, alpha: 1).cgColor
view.layer?.borderWidth = 0.0
view.layer?.borderColor = NSColor.lightGray.cgColor
}
override func prepareForReuse() {
super.prepareForReuse()
imageView?.boundsRotation = 0
}
func setHighlight(_ selected: Bool) {
view.layer?.borderWidth = selected ? 3.0 : 0.0
}
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
let kind = change![NSKeyValueChangeKey.kindKey]! as! UInt
if(keyPath == "modelRepresentation.Rotation")
{
if kind == NSKeyValueChange.setting.rawValue {
guard let newVal = change![.newKey]! as? Int,
let oldVal = change![.oldKey]! as? Int else {return}
let delta = newVal - oldVal
imageView?.rotate(byDegrees: CGFloat(delta))
}
}
}
}
Oops, I found my bug. The collection view and collection view item are fine; I was just setting the image twice in another place, and that was the issue.
Thanks to all who viewed this topic!
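One small, unrelated observation about the posted code (my addition, not part of the fix above): the observer registered in init(nibName:bundle:) is never removed, so it would be worth balancing it in deinit:

deinit {
    // Balance the addObserver(_:forKeyPath:options:context:) call made in init.
    removeObserver(self, forKeyPath: "modelRepresentation.Rotation")
}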
I'm missing something here. I got live rendering (IBDesignable) to work in Interface Builder, and I can draw the image in the NSView with NSImage's drawInRect, so the image loads correctly. However, when I put the image into a CALayer, it doesn't show up.
Did I miss something about the behaviour of NSView, CALayer, or layer hosting? Or something else?
Here's the code of the view:
import Foundation
import AppKit
import QuartzCore
@IBDesignable public class CircularImageView: NSView {
var imageLayer: CALayer?
@IBInspectable public var edgeInset: CGFloat = 10
public var image: NSImage? {
didSet {
if let newImage = image {
imageLayer?.contents = newImage
}
}
}
// MARK: New in this class
private func prepareLayer() {
self.layer = CALayer()
self.wantsLayer = true
}
private func drawImage() {
// What am I doing wrong here?
var newImageLayer = CALayer()
newImageLayer.contentsGravity = kCAGravityResizeAspect
if let imageToSet = image {
newImageLayer.contents = imageToSet
}
let insetBounds = CGRectInset(self.bounds, edgeInset, edgeInset)
newImageLayer.frame = insetBounds
newImageLayer.backgroundColor = NSColor(calibratedWhite: 0.8, alpha: 1).CGColor
self.layer!.addSublayer(newImageLayer)
imageLayer = newImageLayer
}
private func test(){
image?.drawInRect(self.bounds)
}
// MARK: NSView stuff
public override init(frame frameRect: NSRect) {
super.init(frame: frameRect)
prepareLayer()
}
public required init?(coder: NSCoder) {
super.init(coder: coder)
prepareLayer()
}
public override func viewWillDraw() {
super.viewWillDraw()
drawImage()
}
public override func prepareForInterfaceBuilder() {
super.prepareForInterfaceBuilder()
// Load default test image.
println("\(self): prepareForInterfaceBuilder")
let processInfo = NSProcessInfo.processInfo()
let environment = processInfo.environment
let projectSourceDirectories : AnyObject = environment["IB_PROJECT_SOURCE_DIRECTORIES"]!
let directories = projectSourceDirectories.componentsSeparatedByString(":")
if directories.count != 0 {
let firstPath = directories[0] as! String
let imagePath = firstPath.stringByAppendingPathComponent("CircularView/Bz1dSvR.jpg")
let image = NSImage(contentsOfFile: imagePath)
image!.setName("Test Image")
self.image = image
}
}
}
Thanks in advance.
Okay, guys, I found the answer.
I did two things wrong in this code. I was working with a layer-hosting view when I simply needed a layer-backed view; I didn't know there was a difference. And before assigning an NSImage to the CALayer's contents, I needed to surround the assignment with lockFocus() and unlockFocus() on the image.
Here's the full code that solved the issue.
import Foundation
import AppKit
@IBDesignable public class CircularImageView: NSView {
var imageLayer: CALayer?
@IBInspectable public var edgeInset: CGFloat = 10
public var image: NSImage? {
didSet {
if let newImage = image {
imageLayer?.contents = newImage
}
}
}
// MARK: New in this class
private func prepareLayer() {
// I had to remove my own created layer.
self.wantsLayer = true
}
private func drawImage() {
var newImageLayer = CALayer()
newImageLayer.contentsGravity = kCAGravityResizeAspect
if let imageToSet = image {
// I didn't lock the focus on the imageToSet.
imageToSet.lockFocus()
newImageLayer.contents = imageToSet
// I didn't unlock the focus either.
imageToSet.unlockFocus()
}
let insetBounds = CGRectInset(self.bounds, edgeInset, edgeInset)
newImageLayer.frame = insetBounds
newImageLayer.backgroundColor = NSColor(calibratedWhite: 0.8, alpha: 1).CGColor
self.layer!.addSublayer(newImageLayer)
imageLayer = newImageLayer
}
// MARK: NSView stuff
public override init(frame frameRect: NSRect) {
super.init(frame: frameRect)
prepareLayer()
}
public required init?(coder: NSCoder) {
super.init(coder: coder)
prepareLayer()
}
public override func viewWillDraw() {
super.viewWillDraw()
drawImage()
}
public override func prepareForInterfaceBuilder() {
super.prepareForInterfaceBuilder()
// Load default test image.
println("\(self): prepareForInterfaceBuilder")
let processInfo = NSProcessInfo.processInfo()
let environment = processInfo.environment
let projectSourceDirectories : AnyObject = environment["IB_PROJECT_SOURCE_DIRECTORIES"]!
let directories = projectSourceDirectories.componentsSeparatedByString(":")
if directories.count != 0 {
let firstPath = directories[0] as! String
let imagePath = firstPath.stringByAppendingPathComponent("CircularView/Bz1dSvR.jpg")
let image = NSImage(contentsOfFile: imagePath)
image!.setName("Test Image")
self.image = image
}
}
}
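For anyone hitting the same thing, the layer-backed vs. layer-hosting distinction mentioned above comes down to whether you hand AppKit your own root layer. A short sketch of the two setups (in current Swift syntax, separate from the Swift 1.x code above):

import AppKit

// Layer-backed: AppKit creates and manages the backing layer;
// you can still add your own sublayers to it, which is what the fixed code relies on.
let backedView = NSView()
backedView.wantsLayer = true

// Layer-hosting: you assign the root layer *before* setting wantsLayer,
// and you then own all of that layer's content yourself.
let hostingView = NSView()
hostingView.layer = CALayer()
hostingView.wantsLayer = true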