I am trying to create a custom UIButton that I can use throughout my application. I would like to apply a gradient as its background, but the gradient is not showing up with this very simple code.
It would be very helpful if someone could point out my mistake in the code below.
class GradientBtn: UIButton {

    let gradientLayer = CAGradientLayer()

    override init(frame: CGRect) {
        super.init(frame: frame)
        themeConfig()
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        themeConfig()
    }

    private func themeConfig() {
        // shadow
        layer.shadowOffset = CGSize.zero
        layer.shadowColor = UIColor.gray.cgColor
        layer.shadowOpacity = 1.0

        // title text
        setTitleColor(Theme.colorWhite, for: .normal)
        titleLabel?.font = UIFont(name: Theme.fontAvenir, size: 18)

        // rounded corners
        layer.cornerRadius = frame.size.height / 2

        // gradient
        gradientLayer.locations = [0.0, 1.0]
        gradientLayer.colors = [Theme.colorlightBlue, Theme.colorMidBlue]
        gradientLayer.startPoint = CGPoint(x: 0.0, y: 0.0)
        gradientLayer.endPoint = CGPoint(x: 1.0, y: 1.0)
        gradientLayer.frame = bounds
        layer.insertSublayer(gradientLayer, at: 0)
    }
}
Replace
gradientLayer.colors = [Theme.colorlightBlue, Theme.colorMidBlue]
with
gradientLayer.colors = [Theme.colorlightBlue.cgColor, Theme.colorMidBlue.cgColor]
And it's always good to add
override func layoutSubviews() {
    super.layoutSubviews()
    gradientLayer.frame = self.bounds
}
Sh_Khan is right about the CGColor references, but a few additional refinements:
Rather than adding the gradient as a sublayer, make it the layerClass of the UIButton. This makes animations much better (e.g. if the button changes size on device rotation).
When you do this, remove all attempts to set the frame of the gradientLayer.
In layoutSubviews, you’ll no longer need to update the gradientLayer.frame, so remove that entirely, but you do want to put your corner rounding there.
It's unlikely to matter here, but when you set layer.cornerRadius (or any property of the view, its layer, or that layer's sublayers), never base it on self.frame. The frame is the view's rectangle in the superview's coordinate system; within the subclass, only refer to the view's bounds, never its frame.
Thus:
class GradientBtn: UIButton {

    override class var layerClass: AnyClass { return CAGradientLayer.self }

    private var gradientLayer: CAGradientLayer { return layer as! CAGradientLayer }

    override init(frame: CGRect = .zero) {
        super.init(frame: frame)
        themeConfig()
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        themeConfig()
    }

    private func themeConfig() {
        // shadow
        layer.shadowOffset = .zero
        layer.shadowColor = UIColor.gray.cgColor
        layer.shadowOpacity = 1

        // title text
        setTitleColor(Theme.colorWhite, for: .normal)
        titleLabel?.font = UIFont(name: Theme.fontAvenir, size: 18)

        // gradient
        gradientLayer.locations = [0, 1]
        gradientLayer.colors = [Theme.colorlightBlue, Theme.colorMidBlue].map { $0.cgColor }
        gradientLayer.startPoint = CGPoint(x: 0, y: 0)
        gradientLayer.endPoint = CGPoint(x: 1, y: 1)
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        layer.cornerRadius = min(bounds.height, bounds.width) / 2
    }
}
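For completeness, here is a minimal usage sketch under Auto Layout. The button name, title, and constraint values are illustrative (not from the question), and Theme is assumed to exist as in the code above; the point is that the gradient and corner radius track the size the constraints produce:

// somewhere in a view controller
let continueButton = GradientBtn()
continueButton.setTitle("Continue", for: .normal)
continueButton.translatesAutoresizingMaskIntoConstraints = false
view.addSubview(continueButton)

NSLayoutConstraint.activate([
    continueButton.centerXAnchor.constraint(equalTo: view.centerXAnchor),
    continueButton.centerYAnchor.constraint(equalTo: view.centerYAnchor),
    continueButton.widthAnchor.constraint(equalToConstant: 240),
    continueButton.heightAnchor.constraint(equalToConstant: 50)
])
// layoutSubviews() rounds the corners once the final size is known,
// and the CAGradientLayer resizes automatically because it is the button's backing layer.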
I use SnapKit in my project and am trying to add a gradient to my button.
I have this extension:
extension UIButton {
    public func setGradientColor(colorOne: UIColor, colorTwo: UIColor) {
        let gradientLayer = CAGradientLayer()
        gradientLayer.frame = bounds
        gradientLayer.colors = [colorOne.cgColor, colorTwo.cgColor]
        gradientLayer.locations = [0.0, 1.0]
        gradientLayer.startPoint = CGPoint(x: 0.0, y: 0.0)
        gradientLayer.endPoint = CGPoint(x: 1.0, y: 1.0)
        layer.insertSublayer(gradientLayer, at: 0)
    }
}
I have a button configuration where I am trying to use the gradient:
private var playersButton: UIButton = {
    let button = UIButton(type: .custom)
    button.setGradientColor(colorOne: .red, colorTwo: .blue)
    button.frame = button.layer.frame
    return button
}()
and the SnapKit constraints here:
playersButton.snp.makeConstraints { make in
    make.leading.equalToSuperview().inset(60)
    make.trailing.equalToSuperview().inset(60)
    make.bottom.equalTo(startGameButton).inset(100)
    make.height.equalTo(screenHeight / 12.82)
}
The problem is that I get no result: I don't see the gradient. But if I delete the SnapKit configuration and call the setGradientColor extension in viewDidLoad, it works well!
Example:
private var playersButton: UIButton = {
    let button = UIButton(type: .custom)
    button.frame = CGRect(x: 0, y: 0, width: 200, height: 200)
    return button
}()

override func viewDidLoad() {
    super.viewDidLoad()
    playersButton.setGradientColor(colorOne: .blue, colorTwo: .red)
}
How do I change my code so that the first approach works? Thank you.
The difference is that when using SnapKit you're using Auto Layout, so your button's frame has not been set yet when you call button.setGradientColor(colorOne: .red, colorTwo: .blue).
The result is that your gradient layer ends up with a frame size of zero, so you don't see it.
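You can see the timing problem by logging the button's bounds where the extension runs versus after layout. A quick diagnostic sketch inside your view controller; the print statements are purely illustrative and assume the button is added and constrained as in your code:

private var playersButton: UIButton = {
    let button = UIButton(type: .custom)
    print("bounds at creation:", button.bounds)   // .zero, so gradientLayer.frame = bounds is zero-sized
    button.setGradientColor(colorOne: .red, colorTwo: .blue)
    return button
}()

override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    print("bounds after layout:", playersButton.bounds)   // the real, non-zero size produced by the SnapKit constraints
}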
You will likely find it much easier and more reliable to use a button subclass like this (simple example based on the code you posted):
class MyGradientButton: UIButton {

    let gradLayer = CAGradientLayer()

    override init(frame: CGRect) {
        super.init(frame: frame)
        commonInit()
    }

    required init?(coder: NSCoder) {
        super.init(coder: coder)
        commonInit()
    }

    func commonInit() {
        layer.insertSublayer(gradLayer, at: 0)
    }

    public func setGradientColor(colorOne: UIColor, colorTwo: UIColor) {
        gradLayer.colors = [colorOne.cgColor, colorTwo.cgColor]
        gradLayer.locations = [0.0, 1.0]
        gradLayer.startPoint = CGPoint(x: 0.0, y: 0.0)
        gradLayer.endPoint = CGPoint(x: 1.0, y: 1.0)
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        gradLayer.frame = bounds
    }
}
Then change your button declaration to:
private var playersButton: MyGradientButton = {
    let button = MyGradientButton()
    button.setGradientColor(colorOne: .red, colorTwo: .blue)
    return button
}()
Now, whether you set its frame explicitly, or if you use auto-layout (normal syntax or with SnapKit), the gradient layer will automatically adjust whenever the button's frame changes.
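For example, your original SnapKit setup can stay exactly as it was; only the button's type changes. A sketch reusing the startGameButton and screenHeight from your question:

view.addSubview(playersButton)   // the button must be in the hierarchy before constraining it

playersButton.snp.makeConstraints { make in
    make.leading.equalToSuperview().inset(60)
    make.trailing.equalToSuperview().inset(60)
    make.bottom.equalTo(startGameButton).inset(100)
    make.height.equalTo(screenHeight / 12.82)
}
// layoutSubviews() in MyGradientButton keeps gradLayer.frame in sync with whatever size the constraints produce.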
I am trying to create an overlay view that has a "cutout" part based on a frame passed to the view; the size and position of that frame will change each time the view is created. In that "cutout" part I expect to see the content underneath the overlay view. I tried setting a border on a rounded rectangle added to a CGMutablePath, but no luck.
The expected result is something like this:
Here is the code I currently have in my UIView subclass, without the attempted solutions, as I can't seem to get them to work properly. This code displays the expected result, but without the red border:
override func draw(_ rect: CGRect) {
    super.draw(rect)

    UIColor.blue.setFill()
    UIRectFill(rect)

    let shapeLayer = CAShapeLayer()
    let path = CGMutablePath()

    // frame that will change position on the screen
    if let frame = changingFrame {
        path.addRoundedRect(in: frame, cornerWidth: 16, cornerHeight: 16)
    }
    path.addRect(bounds)

    shapeLayer.path = path
    shapeLayer.fillRule = CAShapeLayerFillRule.evenOdd
    self.layer.mask = shapeLayer
}
I have tried solutions from here and here, but no luck, as the CAShapeLayer used for the border just overlays the existing one.
What can I do differently to achieve the expected result? Thanks!
Try this ⭐️
If all you want to do is create a rounded rectangle, you can simply use:
// a rounded-rectangle path
let rectangle = CGRect(x: 0, y: 0, width: 100, height: 100)
let path = UIBezierPath(roundedRect: rectangle, cornerRadius: 20)

// a dashed border on an existing rounded view
// (`view` is the view you want to outline; borderWidth was not defined in the
// original snippet, so a value is assumed here)
let borderWidth: CGFloat = 2

view.clipsToBounds = true
view.layer.cornerRadius = 10.0

let border = CAShapeLayer()
border.path = UIBezierPath(roundedRect: view.bounds, cornerRadius: 10.0).cgPath
border.frame = view.bounds
border.fillColor = nil
border.strokeColor = UIColor.purple.cgColor
border.lineWidth = borderWidth * 2.0 // doubled since half will be clipped
border.lineDashPattern = [1.0]
view.layer.addSublayer(border)
One approach is to use two sublayers... a "cutout" layer and a "border" layer.
Use the same path for the cutout and the border shape, setting the line width and stroke color for the "outline".
Here's an example, including making it @IBDesignable with a few @IBInspectable properties:
@IBDesignable
class BorderedCutoutView: UIView {

    @IBInspectable
    var bkgColor: UIColor = .systemBlue {
        didSet {
            setNeedsLayout()
        }
    }

    @IBInspectable
    var brdColor: UIColor = .white {
        didSet {
            setNeedsLayout()
        }
    }

    @IBInspectable
    var brdWidth: CGFloat = 1 {
        didSet {
            setNeedsLayout()
        }
    }

    @IBInspectable
    var radius: CGFloat = 20 {
        didSet {
            setNeedsLayout()
        }
    }

    @IBInspectable
    var horizInset: CGFloat = 40.0 {
        didSet {
            setNeedsLayout()
        }
    }

    @IBInspectable
    var vertInset: CGFloat = 60.0 {
        didSet {
            setNeedsLayout()
        }
    }

    private let cutoutLayer = CAShapeLayer()
    private let borderLayer = CAShapeLayer()

    override init(frame: CGRect) {
        super.init(frame: frame)
        commonInit()
    }

    required init?(coder: NSCoder) {
        super.init(coder: coder)
        commonInit()
    }

    override func prepareForInterfaceBuilder() {
        super.prepareForInterfaceBuilder()
        backgroundColor = .clear
    }

    private func commonInit() -> Void {
        backgroundColor = .clear
        layer.addSublayer(cutoutLayer)
        layer.addSublayer(borderLayer)
    }

    override func layoutSubviews() {
        super.layoutSubviews()

        let path = UIBezierPath(rect: bounds)
        let cp = UIBezierPath(roundedRect: bounds.insetBy(dx: horizInset, dy: vertInset), cornerRadius: radius)
        path.append(cp)
        path.usesEvenOddFillRule = true

        cutoutLayer.path = path.cgPath
        cutoutLayer.fillRule = .evenOdd
        cutoutLayer.fillColor = bkgColor.cgColor

        borderLayer.path = cp.cgPath
        borderLayer.fillColor = UIColor.clear.cgColor
        borderLayer.lineWidth = brdWidth
        borderLayer.strokeColor = brdColor.cgColor
    }
}
This example uses horizontal and vertical "inset" values to center the cutout in the view.
Result:
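If you are adding it in code rather than a storyboard, usage looks something like this. A sketch only: the property values are illustrative, and you would pick insets that place the cutout over your changingFrame:

let overlay = BorderedCutoutView()
overlay.bkgColor = .blue
overlay.brdColor = .red
overlay.brdWidth = 2
overlay.radius = 16
overlay.horizInset = 40
overlay.vertInset = 120
overlay.frame = view.bounds
overlay.autoresizingMask = [.flexibleWidth, .flexibleHeight]
view.addSubview(overlay)
// layoutSubviews() rebuilds both the cutout path and the border path whenever the overlay resizes.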
So I am implementing a simple gradient on the lower half of a UIView subclass, like the following.
However, every time I leave the app and re-enter it, the gradient gets darker and darker. How do I make it stay the same shade?
Here is my code for the gradient. I don't know if it's correct to place it in layoutSubviews, but I couldn't get it to work just by calling the gradient code directly, such as in viewDidLoad for example:
override func layoutSubviews() {
    let gradientLayer = CAGradientLayer()
    gradientLayer.type = .axial
    gradientLayer.colors = [UIColor.clear.cgColor, UIColor.black.cgColor]
    gradientLayer.frame = bounds
    gradientLayer.startPoint = CGPoint(x: 0, y: 0.5)
    gradientLayer.endPoint = .init(x: 0, y: 1)
    gradientLayer.zPosition = 1
    gradientLayer.opacity = 0.5
    layer.addSublayer(gradientLayer)
}
layoutSubviews() gets called multiple times, so your gradient is getting added repeatedly.
layoutSubviews() is not really the place to be adding this gradient. The reason adding it in viewDidLoad() was not working for you is because the bounds of the view haven't been established yet. Go ahead and set up your gradient in viewDidLoad(). Keep the gradient as a property, and use viewDidLayoutSubviews() to update the frame.
class ViewController: UIViewController {

    let gradientLayer = CAGradientLayer()

    override func viewDidLoad() {
        super.viewDidLoad()

        gradientLayer.type = .axial
        gradientLayer.colors = [UIColor.clear.cgColor, UIColor.black.cgColor]
        gradientLayer.startPoint = CGPoint(x: 0, y: 0.5)
        gradientLayer.endPoint = .init(x: 0, y: 1)
        gradientLayer.zPosition = 1
        gradientLayer.opacity = 0.5
        view.layer.addSublayer(gradientLayer)
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        gradientLayer.frame = view.bounds
    }
}
For a view that is a subclass of UIView, you'd handle it like this:
class MyView: UIView {

    let gradientLayer = CAGradientLayer()

    override init(frame: CGRect) {
        super.init(frame: frame)
        addGradient()
    }

    required init?(coder: NSCoder) {
        super.init(coder: coder)
        addGradient()
    }

    func addGradient() {
        gradientLayer.type = .axial
        gradientLayer.colors = [UIColor.clear.cgColor, UIColor.black.cgColor]
        gradientLayer.startPoint = CGPoint(x: 0, y: 0.5)
        gradientLayer.endPoint = .init(x: 0, y: 1)
        gradientLayer.zPosition = 1
        gradientLayer.opacity = 0.5
        layer.addSublayer(gradientLayer)
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        gradientLayer.frame = bounds
    }
}
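A quick way to exercise the subclass (illustrative only; the view name and frame values are made up for the sketch):

let fadedView = MyView()
fadedView.frame = CGRect(x: 20, y: 100, width: 300, height: 200)
view.addSubview(fadedView)
// The gradient layer is added once in init and only resized in layoutSubviews,
// so leaving and re-entering the app no longer stacks additional gradients.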
I created a custom UIView and then added it to a number of view controllers. The view controllers become glitchy and do not render in the storyboard. I just see the view controllers' outlines and no content. If I remove the custom view then everything goes back to normal.
Can someone please take a look at my custom view code and see if I am doing anything wrong? Thank you so much.
import UIKit

@IBDesignable
class ProgressView: UIView {

    @IBOutlet var contentView: UIView!

    @IBInspectable var percentageProgress: CGFloat = 0.0 {
        didSet {
            self.setGradient()
        }
    }

    override init(frame: CGRect) {
        super.init(frame: frame)
        commonInit()
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        commonInit()
    }

    private func commonInit() {
        Bundle.main.loadNibNamed("ProgressView", owner: self, options: nil)
        addSubview(contentView)
        contentView.frame = self.bounds
        contentView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
        setGradient()
    }

    private func setGradient() {
        if let sublayers = self.contentView.layer.sublayers {
            for sublayer in sublayers {
                sublayer.removeFromSuperlayer()
            }
        }

        let gradientLayer = CAGradientLayer()
        gradientLayer.colors = [UIColor(#colorLiteral(red: 0.3935321569, green: 0.3986310661, blue: 0.6819975972, alpha: 1)).cgColor, UIColor(#colorLiteral(red: 0.1705667078, green: 0.649338007, blue: 0.5616513491, alpha: 1)).cgColor]
        gradientLayer.startPoint = CGPoint(x: 0.0, y: 0.5)
        gradientLayer.endPoint = CGPoint(x: 1.0, y: 0.5)
        gradientLayer.cornerRadius = contentView.frame.height / 2
        contentView.layer.cornerRadius = contentView.frame.height / 2
        self.contentView.layer.insertSublayer(gradientLayer, at: 0)
        gradientLayer.frame = CGRect(x: contentView.frame.minX, y: contentView.frame.minY,
                                     width: self.contentView.frame.maxX * (percentageProgress ?? 1.0),
                                     height: contentView.frame.maxY)
        self.contentView.setNeedsDisplay()
    }
}
I have a UIView. My UIView has a layer, and I added another layer to it. I need to cut a circle out of this layer wherever the user moves their finger, to create a drawing effect. I will leave my code below, but it is not correct, as it only ever creates a single circle.
class CustomImageView: UIImageView {

    private var recognizer: UIPanGestureRecognizer!
    private var maskLayer: CALayer!

    override init(frame: CGRect) {
        super.init(frame: frame)
        setup()
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        setup()
    }

    private func setup() {
        self.isUserInteractionEnabled = true
        recognizer = UIPanGestureRecognizer(target: self, action: #selector(swipeGesture))
        self.addGestureRecognizer(recognizer)

        maskLayer = CALayer()
        maskLayer.backgroundColor = UIColor.black.withAlphaComponent(0.6).cgColor
        maskLayer.frame = self.bounds
        self.layer.insertSublayer(maskLayer, at: 0)
    }

    private func crateMask(location: CGPoint, layer: CALayer) {
        let maskFrame = CGRect(origin: CGPoint(x: location.x - 25, y: location.y - 25), size: CGSize(width: 50, height: 50))
        layer.createMask(maskRect: maskFrame, invert: true)
    }

    @objc func swipeGesture(sender: UIPanGestureRecognizer) {
        let location = sender.location(in: self)
        print(location)
        crateMask(location: location, layer: self.maskLayer)
    }
}

extension CALayer {
    func createMask(maskRect: CGRect, invert: Bool = false) {
        print(maskRect)
        let maskLayer = CAShapeLayer()
        let path = CGMutablePath()
        if invert {
            path.addRect(self.bounds)
        }
        let cornerRadius = maskRect.height / 2
        path.addRoundedRect(in: maskRect, cornerWidth: cornerRadius, cornerHeight: cornerRadius)
        maskLayer.path = path
        if invert {
            maskLayer.fillRule = CAShapeLayerFillRule.evenOdd
        }
        self.mask = maskLayer
    }
}
Here is the screen shot of the UI: https://prnt.sc/ozki6s
Any help will be appreciated! Thanks in advance.