UIColor code in a variable in Swift

I want to set the background color of a view through a parameter, so that when I want to change it I can do so in one place. The value of the parameter comes from a database. My current code looks like this:
loadingView.backgroundColor = UIColor(red:0.99, green:0.75, blue:0.14, alpha:1.0)
I want to change the above code as follows:
loadingView.backgroundColor = UIColor(red:a, green:b, blue:c, alpha:d)
or
loadingView.backgroundColor = UIColor(hex)
where a, b, c, d, and hex are variables that already hold values.
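In other words, the goal is something like this (a minimal sketch; dbRed, dbGreen, dbBlue, and dbAlpha are hypothetical stand-ins for whatever the database returns):
// Hypothetical component values fetched from the database, assumed to be in the 0–1 range.
let (dbRed, dbGreen, dbBlue, dbAlpha) = (0.99, 0.75, 0.14, 1.0)
loadingView.backgroundColor = UIColor(red: CGFloat(dbRed),
                                      green: CGFloat(dbGreen),
                                      blue: CGFloat(dbBlue),
                                      alpha: CGFloat(dbAlpha))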

If hex is an Int and not a String, you can use this extension:
extension UIColor {
    convenience init(hex: Int, alpha: CGFloat) {
        let red = CGFloat((hex & 0xFF0000) >> 16) / 255.0
        let green = CGFloat((hex & 0x00FF00) >> 8) / 255.0
        let blue = CGFloat(hex & 0x0000FF) / 255.0
        self.init(red: red, green: green, blue: blue, alpha: alpha)
    }
}
Used like this:
UIColor(hex: 0x00ff00, alpha: 1.0)
UIColor(hex: hex, alpha: 1.0)
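If the stored value arrives as text rather than a number, one way to feed it into this initializer is to parse the string into an Int first (a sketch; the "0x"-prefixed storage format is an assumption):
// Hypothetical stored value; strip an optional "0x" prefix, then parse base-16.
let stored = "0xFFBF24"
let digits = stored.hasPrefix("0x") ? String(stored.dropFirst(2)) : stored
if let hexValue = Int(digits, radix: 16) {
    loadingView.backgroundColor = UIColor(hex: hexValue, alpha: 1.0)
}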

extension String {
    subscript(range: ClosedRange<Int>) -> String {
        // substringWithRange/advance are Swift 1 APIs; use String.Index offsets instead.
        let start = index(startIndex, offsetBy: range.lowerBound)
        let end = index(startIndex, offsetBy: range.upperBound)
        return String(self[start...end])
    }
}
extension UIColor {
    convenience init(htmlColor: String, alpha: Double) {
        self.init(red: CGFloat(strtoul(htmlColor[0...1], nil, 16)) / 255.0,
                  green: CGFloat(strtoul(htmlColor[2...3], nil, 16)) / 255.0,
                  blue: CGFloat(strtoul(htmlColor[4...5], nil, 16)) / 255.0,
                  alpha: CGFloat(alpha))
    }
}
Testing
let greenColorHEX = "00ff00"
let greenColor = UIColor(htmlColor: greenColorHEX, alpha: 1)
let a: CGFloat = 1
let b: CGFloat = 1
let c: CGFloat = 0
let d: CGFloat = 1
let yellowColor = UIColor(red: a, green: b, blue: c, alpha: d)

Related

Swift 5.4 hex to NSColor

I am developing a program for macOS.
I need to convert a hex color to NSColor.
I looked at the proposed solutions here:
Convert Hex Color Code to NSColor
How to convert hex to NSColor?
But none of them works correctly with Xcode 12.5.1.
For the moment I have done this, and it works correctly:
extension NSObject {
    func RGB(r: CGFloat, g: CGFloat, b: CGFloat, alpha: CGFloat = 1) -> NSColor {
        return NSColor(red: r/255, green: g/255, blue: b/255, alpha: alpha)
    }
}
let fillColor = RGB(r: 33, g: 150, b: 243)
Ideally without having to use Cocoa.
I would like a function like this: hexToNSColor("#2196f3")
Can you give me a hand?
You could try something like this:
EDIT: included toHex(alpha:), from code I probably got from the net somewhere many years ago.
EDIT 3 & 4: included the case for #RRGGBBAA.
EDIT 5: stripped blank spaces in the hex string, so that NSColor(hex: " # 2196f380 ") works as well.
extension NSColor {
    convenience init(hex: String) {
        let trimHex = hex.trimmingCharacters(in: .whitespacesAndNewlines)
        let dropHash = String(trimHex.dropFirst()).trimmingCharacters(in: .whitespacesAndNewlines)
        let hexString = trimHex.starts(with: "#") ? dropHash : trimHex
        let value = Int(UInt64(hexString, radix: 16) ?? 0)
        // #RRGGBB
        var components = (
            R: CGFloat((value >> 16) & 0xff) / 255,
            G: CGFloat((value >> 08) & 0xff) / 255,
            B: CGFloat((value >> 00) & 0xff) / 255,
            a: CGFloat(1)
        )
        if hexString.count == 8 {
            // #RRGGBBAA
            components = (
                R: CGFloat((value >> 24) & 0xff) / 255,
                G: CGFloat((value >> 16) & 0xff) / 255,
                B: CGFloat((value >> 08) & 0xff) / 255,
                a: CGFloat((value >> 00) & 0xff) / 255
            )
        }
        self.init(red: components.R, green: components.G, blue: components.B, alpha: components.a)
    }

    func toHex(alpha: Bool = false) -> String? {
        guard let components = cgColor.components, components.count >= 3 else {
            return nil
        }
        let r = Float(components[0])
        let g = Float(components[1])
        let b = Float(components[2])
        var a = Float(1.0)
        if components.count >= 4 {
            a = Float(components[3])
        }
        if alpha {
            return String(format: "%02lX%02lX%02lX%02lX",
                          lroundf(r * 255), lroundf(g * 255), lroundf(b * 255), lroundf(a * 255))
        } else {
            return String(format: "%02lX%02lX%02lX",
                          lroundf(r * 255), lroundf(g * 255), lroundf(b * 255))
        }
    }
}
let nscol = NSColor(hex: "#2196f3") // <-- with or without #
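A quick round-trip check (hedged; exact output can vary with the color space NSColor reports):
let nscol8 = NSColor(hex: "#2196f380")    // #RRGGBBAA form
print(nscol.toHex() ?? "nil")             // expected: 2196F3
print(nscol8.toHex(alpha: true) ?? "nil") // expected: 2196F380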
EDIT 2: you can do the same for UIColor, and for Color (with UIColor or NSColor):
extension Color {
    public init(hex: String) {
        self.init(UIColor(hex: hex))
    }
    public func toHex(alpha: Bool = false) -> String? {
        UIColor(self).toHex(alpha: alpha)
    }
}
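On macOS, the same bridge presumably goes through NSColor instead; a minimal sketch, assuming the NSColor(hex:) extension above and the system Color(nsColor:) / NSColor(_:) initializers (macOS 12 / 11 and later):
import SwiftUI

#if os(macOS)
extension Color {
    public init(hex: String) {
        self.init(nsColor: NSColor(hex: hex)) // Color(nsColor:) requires macOS 12+
    }
    public func toHex(alpha: Bool = false) -> String? {
        NSColor(self).toHex(alpha: alpha)     // NSColor(_: Color) requires macOS 11+
    }
}
#endif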
/*
// With hash
let color: NSColor = NSColor(hexString: "#ff8942")
// Without hash, with alpha
let secondColor: NSColor = NSColor(hexString: "ff8942", alpha: 0.5)
// Short handling
let shortColorWithHex: NSColor = NSColor(hexString: "fff")
// From a real hex value (an `Int`)
// With hash
let color: NSColor = NSColor(hex: 0xff8942)
// Without hash, with alpha
let secondColor: NSColor = NSColor(hex: 0xff8942, alpha: 0.5)
*/
#if os(iOS) || os(tvOS) || os(watchOS)
import UIKit
typealias SWColor = UIColor
#else
import Cocoa
typealias SWColor = NSColor
#endif

private extension Int64 {
    func duplicate4bits() -> Int64 {
        return (self << 4) + self
    }
}
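duplicate4bits is what makes the 3- and 4-digit shorthand forms work: it repeats a nibble to fill a byte, so 0xF becomes 0xFF and 0xA becomes 0xAA (the CSS rule that #abc means #aabbcc). A quick sanity check:
// (self << 4) + self: 0xA -> 0xA0 + 0x0A = 0xAA
assert(Int64(0xA).duplicate4bits() == 0xAA)
assert(Int64(0xF).duplicate4bits() == 0xFF)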
/// An extension of UIColor (on iOS) or NSColor (on macOS) providing HEX color handling.
public extension SWColor {
    private convenience init?(hex3: Int64, alpha: Float) {
        self.init(red: CGFloat(((hex3 & 0xF00) >> 8).duplicate4bits()) / 255.0,
                  green: CGFloat(((hex3 & 0x0F0) >> 4).duplicate4bits()) / 255.0,
                  blue: CGFloat(((hex3 & 0x00F) >> 0).duplicate4bits()) / 255.0,
                  alpha: CGFloat(alpha))
    }

    private convenience init?(hex4: Int64, alpha: Float?) {
        self.init(red: CGFloat(((hex4 & 0xF000) >> 12).duplicate4bits()) / 255.0,
                  green: CGFloat(((hex4 & 0x0F00) >> 8).duplicate4bits()) / 255.0,
                  blue: CGFloat(((hex4 & 0x00F0) >> 4).duplicate4bits()) / 255.0,
                  alpha: alpha.map(CGFloat.init(_:)) ?? CGFloat(((hex4 & 0x000F) >> 0).duplicate4bits()) / 255.0)
    }

    private convenience init?(hex6: Int64, alpha: Float) {
        self.init(red: CGFloat((hex6 & 0xFF0000) >> 16) / 255.0,
                  green: CGFloat((hex6 & 0x00FF00) >> 8) / 255.0,
                  blue: CGFloat((hex6 & 0x0000FF) >> 0) / 255.0,
                  alpha: CGFloat(alpha))
    }

    private convenience init?(hex8: Int64, alpha: Float?) {
        self.init(red: CGFloat((hex8 & 0xFF000000) >> 24) / 255.0,
                  green: CGFloat((hex8 & 0x00FF0000) >> 16) / 255.0,
                  blue: CGFloat((hex8 & 0x0000FF00) >> 8) / 255.0,
                  alpha: alpha.map(CGFloat.init(_:)) ?? CGFloat((hex8 & 0x000000FF) >> 0) / 255.0)
    }

    /**
     Create a non-autoreleased color from the given hex string and alpha.
     - parameter hexString: The hex string, with or without the hash character.
     - parameter alpha: The alpha value, a floating-point value between 0 and 1.
     - returns: A color with the given hex string and alpha.
     */
    convenience init?(hexString: String, alpha: Float? = nil) {
        var hex = hexString
        // Check for a hash and remove it.
        if hex.hasPrefix("#") {
            hex = String(hex[hex.index(after: hex.startIndex)...])
        }
        guard let hexVal = Int64(hex, radix: 16) else {
            self.init()
            return nil
        }
        switch hex.count {
        case 3:
            self.init(hex3: hexVal, alpha: alpha ?? 1.0)
        case 4:
            self.init(hex4: hexVal, alpha: alpha)
        case 6:
            self.init(hex6: hexVal, alpha: alpha ?? 1.0)
        case 8:
            self.init(hex8: hexVal, alpha: alpha)
        default:
            // Note:
            // The Swift 1.1 compiler was unable to destroy partially initialized classes in all cases,
            // so it disallowed forming a situation where it would have to. Apple considered this a bug
            // to be fixed in future releases, not a feature. -- Apple Developer Forums
            self.init()
            return nil
        }
    }

    /**
     Create a non-autoreleased color from the given hex value and alpha.
     - parameter hex: The hex value. For example: 0xff8942 (no quotation marks).
     - parameter alpha: The alpha value, a floating-point value between 0 and 1.
     - returns: A color with the given hex value and alpha.
     */
    convenience init?(hex: Int, alpha: Float = 1.0) {
        if (0x000000 ... 0xFFFFFF) ~= hex {
            self.init(hex6: Int64(hex), alpha: alpha)
        } else {
            self.init()
            return nil
        }
    }

    convenience init?(argbHex: Int) {
        if (0x00000000 ... 0xFFFFFFFF) ~= argbHex {
            let hex = Int64(argbHex)
            self.init(red: CGFloat((hex & 0x00FF0000) >> 16) / 255.0,
                      green: CGFloat((hex & 0x0000FF00) >> 8) / 255.0,
                      blue: CGFloat((hex & 0x000000FF) >> 0) / 255.0,
                      alpha: CGFloat((hex & 0xFF000000) >> 24) / 255.0)
        } else {
            self.init()
            return nil
        }
    }

    convenience init?(argbHexString: String) {
        var hex = argbHexString
        // Check for a hash and remove it.
        if hex.hasPrefix("#") {
            hex = String(hex[hex.index(after: hex.startIndex)...])
        }
        guard hex.count == 8, let hexVal = Int64(hex, radix: 16) else {
            self.init()
            return nil
        }
        self.init(red: CGFloat((hexVal & 0x00FF0000) >> 16) / 255.0,
                  green: CGFloat((hexVal & 0x0000FF00) >> 8) / 255.0,
                  blue: CGFloat((hexVal & 0x000000FF) >> 0) / 255.0,
                  alpha: CGFloat((hexVal & 0xFF000000) >> 24) / 255.0)
    }
}
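A small check of the shorthand path (a sketch; NSColor/UIColor equality compares the resulting components):
// "fff" should expand to the same white as "ffffff".
let short = SWColor(hexString: "fff")
let full = SWColor(hexString: "ffffff")
print(short == full) // expected: true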

How to convert a Color Literal to CGColor in Swift?

var CodeBackground = #colorLiteral(red: 0.1294117647, green: 0.1294117647, blue: 0.1960784314, alpha: 1)
cells?.layer.borderColor = // ... how can I set this color literal as a CGColor?
I know how to convert a UIColor to a CGColor in Swift, for example:
UIColor.black.cgColor
But what about converting a Color Literal to a CGColor?
Thank you.
As you already know the simpler way of using a color literal as a CGColor, I'll jump to the other way of doing it. (The simpler way is sketched just below for reference.)
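For reference, the simpler way is presumably the cgColor property, since a color literal is itself a UIColor:
// The direct route: a color literal is a UIColor, and UIColor exposes cgColor.
cells?.layer.borderColor = CodeBackground.cgColor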
For the longer route you need a custom function that extracts the color values (red, green, blue) from the color literal, which is as below:
extension UIColor {
    func rgb() -> (red: Int, green: Int, blue: Int, alpha: Int)? {
        var fRed: CGFloat = 0
        var fGreen: CGFloat = 0
        var fBlue: CGFloat = 0
        var fAlpha: CGFloat = 0
        if self.getRed(&fRed, green: &fGreen, blue: &fBlue, alpha: &fAlpha) {
            // Scale all four components, alpha included, to the 0–255 range.
            let iRed = Int(fRed * 255.0)
            let iGreen = Int(fGreen * 255.0)
            let iBlue = Int(fBlue * 255.0)
            let iAlpha = Int(fAlpha * 255.0)
            return (red: iRed, green: iGreen, blue: iBlue, alpha: iAlpha)
        } else {
            // Could not extract RGBA components.
            return nil
        }
    }
}
// It's most convenient to keep this function in a UIColor extension.
Now, with this function in place, you can convert a color literal into a CGColor as below:
let CodeBackground = #colorLiteral(red: 0.1294117647, green: 0.1294117647, blue: 0.1960784314, alpha: 1)
let rgblit = CodeBackground.rgb()
// rgb() returns 0–255 components, while CGColor(srgbRed:...) expects 0–1, so divide by 255.
let Converted_cgColor = CGColor(srgbRed: CGFloat(rgblit!.red) / 255.0,
                                green: CGFloat(rgblit!.green) / 255.0,
                                blue: CGFloat(rgblit!.blue) / 255.0,
                                alpha: CGFloat(rgblit!.alpha) / 255.0)
You can then use Converted_cgColor directly:
cells?.layer.borderColor = Converted_cgColor
Hope it helps.

Convert UIColor initialization to Color Literal Xcode

I have multiple different UIColor objects. Some of them are initialized by a constructor; some of them are shown as color literals.
static let optionsHeader = UIColor([ ]) // Xcode shows a color rect here.
static let optionButtonSelected = UIColor(red: 0.865, green: 0.804, blue: 0.0, alpha: 1.0)
How can I convert the UIColor.init(...) statements to color literals?
An RGB color literal takes the same arguments as the UIColor initializer:
#colorLiteral(red: 1, green: 1, blue: 1, alpha: 1)
Or you can pick a color visually after typing #colorLiteral().
You can also use an extension to accept hex colors instead of typing out RGBA values:
extension UIColor {
    convenience init(hexString: String, alpha: CGFloat = 1.0) {
        var hexString = hexString.trimmingCharacters(in: .whitespacesAndNewlines)
        if hexString.hasPrefix("#") {
            hexString.removeFirst()
        }
        // UInt64(_:radix:) replaces the deprecated Scanner.scanHexInt32/scanLocation APIs.
        let color = UInt64(hexString, radix: 16) ?? 0
        let mask = 0x000000FF
        let r = Int(color >> 16) & mask
        let g = Int(color >> 8) & mask
        let b = Int(color) & mask
        let red = CGFloat(r) / 255.0
        let green = CGFloat(g) / 255.0
        let blue = CGFloat(b) / 255.0
        self.init(red: red, green: green, blue: blue, alpha: alpha)
    }

    func toHexString() -> String {
        var r: CGFloat = 0
        var g: CGFloat = 0
        var b: CGFloat = 0
        var a: CGFloat = 0
        getRed(&r, green: &g, blue: &b, alpha: &a)
        let rgb: Int = Int(r * 255) << 16 | Int(g * 255) << 8 | Int(b * 255) << 0
        return String(format: "#%06x", rgb)
    }
}
Then you can use it like this:
self.backgroundColor = UIColor(hexString: "#4A4A4A")

Converting CGFloat, CGFloat, CGFloat into UIColor?

I have an array of colours that looks like this:
var purpleShades: [(CGFloat, CGFloat, CGFloat)] = [(186.0/255.0, 85.0/255.0, 211.0/255.0), (147.0/255.0, 112.0/255.0, 219.0/255.0), (138.0/255.0, 43.0/255.0, 226.0/255.0), (148.0/255.0, 0.0/255.0, 211.0/255.0), (153.0/255.0, 50.0/255.0, 204.0/255.0), (139.0/255.0, 0.0/255.0, 139.0/255.0)]
Rather than duplicate code, I was wondering if anyone could help me convert it to UIColor, so I can use it in this piece of code:
cell.tintColor = purpleShades[Int(index)]
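One direct route, rather than a hex extension, is to map the tuples into UIColor values once; a minimal sketch, assuming full opacity:
// Build the UIColor array once from the (r, g, b) tuples.
let purpleColors = purpleShades.map { UIColor(red: $0.0, green: $0.1, blue: $0.2, alpha: 1.0) }
cell.tintColor = purpleColors[Int(index)]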
This variation of init might help you. It accepts red, green, blue, and alpha as parameters.
Here's a nice extension to UIColor:
extension UIColor {
    convenience init(hex: UInt, alpha: CGFloat) {
        let red = (hex & 0xFF0000) >> 16
        let green = (hex & 0x00FF00) >> 8
        let blue = hex & 0x0000FF
        self.init(red: CGFloat(red) / 255, green: CGFloat(green) / 255, blue: CGFloat(blue) / 255, alpha: alpha)
    }
}
With that you can write:
let purple = UIColor(hex: 0x9932CC, alpha: 1)
If you have a lot of colours, another extension on UIColor gives you…
extension UIColor {
    static let darkOrchid = UIColor(hex: 0x9932CC, alpha: 1)
    static let darkMagenta = UIColor(hex: 0x8B008B, alpha: 1)
    static let indigo = UIColor(hex: 0x4B0082, alpha: 1)
}
which allows you to say, for example…
cell.tintColor = .darkOrchid

How can I make a Swift enum with UIColor value?

I'm making a drawing app and I would like to refer to my colors through use of an enum. For example, it would be cleaner and more convenient to use Colors.RedColor instead of typing out values every time I want that red color. However, Swift's raw value enums don't seem to accept UIColor as a type. Is there a way to do this with an enum or something similar?
I do it like this (basically using a struct as a namespace):
extension UIColor {
    struct MyTheme {
        static var firstColor: UIColor { return UIColor(red: 1, green: 0, blue: 0, alpha: 1) }
        static var secondColor: UIColor { return UIColor(red: 0, green: 1, blue: 0, alpha: 1) }
    }
}
And you use it like:
UIColor.MyTheme.firstColor
So you can have a red color inside your custom theme.
If your color isn't one of those defined by UIColor's convenience method, you can add an extension to UIColor:
extension UIColor {
    static var firstColor: UIColor { return UIColor(red: 1, green: 0, blue: 0, alpha: 1) }
    static var secondColor: UIColor { return UIColor(red: 0, green: 1, blue: 0, alpha: 1) }
}
// Usage
let myColor = UIColor.firstColor
I use computed properties to solve this problem; this is my code:
enum MyColor {
    case navigationBarBackgroundColor
    case navigationTintColor
}

extension MyColor {
    var value: UIColor {
        switch self {
        case .navigationBarBackgroundColor:
            return UIColor(red: 67/255, green: 173/255, blue: 247/255, alpha: 1.0)
        case .navigationTintColor:
            return UIColor.white
        }
    }
}
Then I can use MyColor like this:
MyColor.navigationBarBackgroundColor.value
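For instance (hypothetical view-controller code):
// Apply the palette colors to a navigation bar.
navigationController?.navigationBar.barTintColor = MyColor.navigationBarBackgroundColor.value
navigationController?.navigationBar.tintColor = MyColor.navigationTintColor.value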
How can I make a Swift enum with UIColor value?
This is how you would literally make an enum with a UIColor value:
import UIKit

final class Color: UIColor, RawRepresentable, ExpressibleByStringLiteral {
    // MARK: - ExpressibleByStringLiteral
    typealias StringLiteralType = String

    convenience init(stringLiteral: String) {
        guard let (a, r, g, b) = Color.argb(hexColor: stringLiteral) else {
            assertionFailure("Invalid string")
            self.init(red: 0, green: 0, blue: 0, alpha: 0)
            return
        }
        self.init(red: r, green: g, blue: b, alpha: a)
    }

    // MARK: - RawRepresentable
    public typealias RawValue = String

    convenience init?(rawValue: RawValue) {
        guard let (a, r, g, b) = Color.argb(hexColor: rawValue) else { return nil }
        self.init(red: r, green: g, blue: b, alpha: a)
    }

    var rawValue: RawValue {
        return hexString()
    }

    // MARK: - Private

    /// Return color components in range [0, 1] for hexadecimal color strings.
    /// - hexColor: case-insensitive string with format RGB, RRGGBB, or AARRGGBB.
    private static func argb(hexColor: String) -> (CGFloat, CGFloat, CGFloat, CGFloat)? {
        let hexAlphabet = "0123456789abcdefABCDEF"
        let hex = hexColor.trimmingCharacters(in: CharacterSet(charactersIn: hexAlphabet).inverted)
        let int = UInt64(hex, radix: 16) ?? 0 // replaces the deprecated scanHexInt32
        let a, r, g, b: UInt64
        switch hex.count {
        case 3: (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17) // RGB
        case 6: (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)                   // RRGGBB
        case 8: (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)      // AARRGGBB
        default: return nil
        }
        return (CGFloat(a)/255, CGFloat(r)/255, CGFloat(g)/255, CGFloat(b)/255)
    }

    private func hexString() -> String {
        var red: CGFloat = 0
        var green: CGFloat = 0
        var blue: CGFloat = 0
        var alpha: CGFloat = 0
        if self.getRed(&red, green: &green, blue: &blue, alpha: &alpha) {
            return String(format: "#%02X%02X%02X%02X", UInt8(red * 255), UInt8(green * 255), UInt8(blue * 255), UInt8(alpha * 255))
        }
        assertionFailure("Invalid colour space.")
        return "#F00"
    }
}
enum Colors: Color {
    case red = "#F00"
    // case blue = "#F00" // Raw value for enum case is not unique
}
let color3 = Color(rawValue: "#000") // RGB
let color6 = Color(rawValue: "#123456") // RRGGBB
let color8 = Color(rawValue: "#12345678") // AARRGGBB
print(Colors(rawValue:"#F00") as Any) // red
print(Colors(rawValue:"#FF0000") as Any) // red
print(Colors(rawValue:"#FFFF0000") as Any) // red
print(Colors(rawValue:"#ABC") as Any) // nil because it’s not a member of the enumeration
// print(Colors(rawValue:"#XYZ") as Any) // assertion on debug, black on release
print(Colors.red) // red
print(Colors.red.rawValue) // UIExtendedSRGBColorSpace 1 0 0 1
With help from
benhurott/UIColorExtension.md
Swift 3 UIColor utilities for random color and color from hex code
Raw type 'Bool' is not expressible by any literal
This can be done much more succinctly (and should):
extension UIColor {
    static let myColor = UIColor(displayP3Red: 0.0, green: 0.7, blue: 0.0, alpha: 1.0)
}
(Any other method that returns a UIColor is equally suitable; it doesn't need to be displayP3Red.)
Usage:
let someColor: UIColor = .myColor
Actually, I use the implementation below. It is very convenient for me for two reasons: first, I can use hex values, and second, all my colors live in constants.
import UIKit

struct ColorPalette {
    struct Gray {
        static let Light = UIColor(netHex: 0x595959)
        static let Medium = UIColor(netHex: 0x262626)
    }
}

extension UIColor {
    convenience init(red: Int, green: Int, blue: Int) {
        assert(red >= 0 && red <= 255, "Invalid red component")
        assert(green >= 0 && green <= 255, "Invalid green component")
        assert(blue >= 0 && blue <= 255, "Invalid blue component")
        self.init(red: CGFloat(red) / 255.0, green: CGFloat(green) / 255.0, blue: CGFloat(blue) / 255.0, alpha: 1.0)
    }

    convenience init(netHex: Int) {
        self.init(red: (netHex >> 16) & 0xff, green: (netHex >> 8) & 0xff, blue: netHex & 0xff)
    }
}
Usage:
let backgroundGreyColor = ColorPalette.Gray.Medium.cgColor
If you want to return multiple values, use the code below; it works for me:
enum GetDriverStatus: String {
    case ClockIn = "Clock In"
    case TripStart = "Trip Start"
    case BeaconTouchPlant = "Beacon Touch Plant"
    case PickUp = "Pick Up"
    case BeaconTouchSite = "Beacon Touch Site"
    case BeaconLeftSite = "Beacon Left Site"
    case DropOff = "Drop Off"
    case BreakIn = "Break In"
    case BreakOut = "Break Out"
    case TripEnd = "Trip End"
    case DayEnd = "Day End"
    //case ClockOut = "Clock Out"

    // Get data from ID
    static var allValues: [GetDriverStatus] {
        return [
            .ClockIn,
            .TripStart,
            .BeaconTouchPlant,
            .PickUp,
            .BeaconTouchSite,
            .BeaconLeftSite,
            .DropOff,
            .BreakIn,
            .BreakOut,
            .TripEnd,
            .DayEnd
        ]
    }

    // Get color and status text
    var colorAndStatus: (UIColor, String) {
        switch self {
        case .ClockIn, .TripStart:
            return (UIColor(red: 248/255, green: 39/255, blue: 71/255, alpha: 1.0), "Idle") // dark pink-red
        case .BeaconTouchPlant, .PickUp:
            return (UIColor(red: 46/255, green: 180/255, blue: 42/255, alpha: 1.0), "Picking up") // green
        case .BeaconTouchSite:
            return (UIColor(red: 252/255, green: 172/255, blue: 0/255, alpha: 1.0), "On site") // orange
        case .DropOff, .BeaconLeftSite:
            return (UIColor(red: 12/255, green: 90/255, blue: 255/255, alpha: 1.0), "Dropping off") // blue
        case .BreakIn, .BreakOut:
            return (UIColor(red: 151/255, green: 151/255, blue: 151/255, alpha: 1.0), "On break") // warm grey
        case .TripEnd:
            return (UIColor.black, "Trip end")
        case .DayEnd:
            return (UIColor.black, "Done for the day")
        }
    }
}
How to use this code:
Index .allValues with the case you want; position 0 of colorAndStatus is the UIColor and position 1 is the String.
GetDriverStatus.allValues[1].colorAndStatus.0 // UIColor for .TripStart (the "Idle" dark pink-red)
GetDriverStatus.allValues[2].colorAndStatus.1 // "Picking up"
Based on @Jano's answer above, I made an improvement by using Int as the literal type:
import UIKit

public final class Colors: UIColor {
}

extension Colors: ExpressibleByIntegerLiteral {
    public typealias IntegerLiteralType = Int

    // The literal is interpreted as 0xRRGGBBAA.
    public convenience init(integerLiteral value: Int) {
        let red = CGFloat((value & 0xFF000000) >> 24) / 0xFF
        let green = CGFloat((value & 0x00FF0000) >> 16) / 0xFF
        let blue = CGFloat((value & 0x0000FF00) >> 8) / 0xFF
        let alpha = CGFloat(value & 0x000000FF) / 0xFF
        self.init(red: red, green: green, blue: blue, alpha: alpha)
    }
}
extension Colors: RawRepresentable {
    public typealias RawValue = Int

    public var rawValue: RawValue {
        return hex
    }

    public convenience init?(rawValue: RawValue) {
        self.init(integerLiteral: rawValue)
    }
}
fileprivate extension UIColor {
    // Pack the components back into 0xRRGGBBAA, matching the integer-literal format above.
    var hex: Int {
        var fRed: CGFloat = 0
        var fGreen: CGFloat = 0
        var fBlue: CGFloat = 0
        var fAlpha: CGFloat = 0
        if self.getRed(&fRed, green: &fGreen, blue: &fBlue, alpha: &fAlpha) {
            let red = Int(fRed * 255.0)
            let green = Int(fGreen * 255.0)
            let blue = Int(fBlue * 255.0)
            let alpha = Int(fAlpha * 255.0)
            let rgba = (red << 24) + (green << 16) + (blue << 8) + alpha
            return rgba
        } else {
            return 0x000000
        }
    }
}
public enum MainPalette: Colors {
    case red = 0xFF0000ff
    case white = 0xFFFFFFFF
}

public enum FeatureXPalette: Colors {
    case blue = 0x024F9Eff
    // case bluish = 0x024F9Eff // <- Can't do: duplicate raw value
    case red = 0xFF0000ff
}
The advantage is that it doesn't allow duplicate colors (like a true enum), and it also supports alpha.
As you can see, you can create multiple enums for different palettes/schemes. If you want views to be able to use any palette, you can just add a protocol:
protocol Color {
    var color: UIColor { get }
}

extension MainPalette: Color {
    var color: UIColor {
        return rawValue
    }
}

extension FeatureXPalette: Color {
    var color: UIColor {
        return rawValue
    }
}
That way you can have a function that takes the protocol:
func printColorEquality(color1: Color, color2: Color) {
    print(color1.color == color2.color)
}

let red1: Color = MainPalette.red
let red2: Color = FeatureXPalette.red
printColorEquality(color1: red1, color2: red2)
What I also like to do is add static vars for convenience:
extension MainPalette {
    public static var brightRed: UIColor {
        return MainPalette.red.color
    }
}
That gives you a cleaner API:
view.backgroundColor = MainPalette.brightRed
Naming can be improved: you have to choose between a nice convenience API and nice naming for your enums.
This answer is probably late, but may help others finding this question.
I was not satisfied with the answers above, since adding colors as a UIColor extension is not always what you want:
It might not be the best solution from a software-architecture perspective.
You cannot use the power that enums have, e.g. CaseIterable (see the sketch after the code below).
This is the solution I came up with:
enum PencilColor {
    case lightRed
    case darkPurple

    var associatedColor: UIColor {
        switch self {
        case .lightRed: return UIColor(red: 67/255, green: 173/255, blue: 247/255, alpha: 1.0)
        case .darkPurple: return UIColor(red: 67/255, green: 173/255, blue: 247/255, alpha: 1.0)
        }
    }
}
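Since CaseIterable was part of the motivation, note that the enum can adopt it with a one-line conformance (a sketch; PencilColor has no associated values, so allCases is synthesized when the conformance is declared in the enum's file):
extension PencilColor: CaseIterable {}

// Iterate over every pencil color, e.g. to build a palette picker.
for pencil in PencilColor.allCases {
    print(pencil, pencil.associatedColor)
}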