'scanHexInt32' was deprecated in iOS 13.0 - swift

What is alternate of scanHexInt32 in iOS 13 (Swift 5+)?
extension UIColor {
    //--------------------------------------------
    /// Creates an opaque UIColor from a 6-digit hex string such as "#FF8800" or "ff8800".
    /// Returns `UIColor.gray` when the cleaned string is not exactly 6 hex digits.
    class func hexColor(hex: String) -> UIColor {
        var cString: String = hex.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines).uppercased()
        if cString.hasPrefix("#") {
            cString = String(cString.dropFirst())
        }
        if cString.count != 6 {
            return UIColor.gray
        }
        // `scanHexInt32` was deprecated in iOS 13.0; scan into a UInt64 with
        // `scanHexInt64` instead — same behavior for 6-digit values.
        var rgbValue: UInt64 = 0
        Scanner(string: cString).scanHexInt64(&rgbValue)
        return UIColor(
            red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
            green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
            blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
            alpha: CGFloat(1.0)
        )
    }
}
Ref: Snapshot

Update to use UInt64 and scanHexInt64:
/// Creates a UIColor from a 6-digit hex string such as "#FF8800" or "FF8800".
/// Uses UInt64 + `scanHexInt64`, the iOS 13 replacement for `scanHexInt32`.
/// - Parameter hex: hex string, with or without a leading "#".
/// - Parameter alpha: opacity in 0...1; defaults to fully opaque.
/// - Note: `assert` fires in debug builds only when the cleaned string is not 6 characters.
convenience init(hex: String, alpha: CGFloat = 1.0) {
var hexFormatted: String = hex.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines).uppercased()
if hexFormatted.hasPrefix("#") {
hexFormatted = String(hexFormatted.dropFirst())
}
assert(hexFormatted.count == 6, "Invalid hex code used.")
var rgbValue: UInt64 = 0
Scanner(string: hexFormatted).scanHexInt64(&rgbValue)
// Extract the three bytes as 0...255 channel values.
self.init(red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: alpha)
}

It looks like Apple is phasing out Int32 on their 64-bit OSes. Try converting your code to use Int64 instead.
#available(iOS, introduced: 2.0, deprecated: 13.0)
open func scanHexInt32(_ result: UnsafeMutablePointer<UInt32>?) -> Bool // Optionally prefixed with "0x" or "0X"
#available(iOS 2.0, *)
open func scanHexInt64(_ result: UnsafeMutablePointer<UInt64>?) -> Bool // Optionally prefixed with "0x" or "0X"

There is another instance method available:
scanInt32(representation:)
Declaration:
func scanInt32(representation: Scanner.NumberRepresentation = .decimal) -> Int32?
Here, you have to pass the enum value .hexadecimal.
It should return the same result for values that fit in Int32; note that the return value is optional, so you must unwrap it.

Try this extension swiftui :
extension Color {
/// Creates a SwiftUI Color from an "RRGGBB" hex string.
/// NOTE(review): a leading "#" is NOT stripped here; `scanHexInt64` then fails
/// and leaves `rgbValue` at 0 (black) — pass the digits only, or strip the
/// hash at the call site. Confirm this is the intended contract.
init(hex: String) {
let scanner = Scanner(string: hex)
// A freshly created Scanner already starts at the beginning of its string;
// this assignment appears to be a no-op kept for explicitness.
scanner.currentIndex = scanner.string.startIndex
var rgbValue: UInt64 = 0
scanner.scanHexInt64(&rgbValue)
// Split the 24-bit value into its three channel bytes.
let r = (rgbValue & 0xff0000) >> 16
let g = (rgbValue & 0xff00) >> 8
let b = rgbValue & 0xff
self.init(red: Double(r) / 0xff, green: Double(g) / 0xff, blue: Double(b) / 0xff)
}
}

I am using this uicolor extension.. please find the below code
extension UIColor {
    /// Builds an opaque color from a 6-digit hex string ("#RRGGBB" or "RRGGBB");
    /// malformed input falls back to `UIColor.gray`.
    class func hexColor(hex: String) -> UIColor {
        var cleaned = hex.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
        if cleaned.hasPrefix("#") {
            cleaned.remove(at: cleaned.startIndex)
        }
        guard cleaned.count == 6 else {
            return UIColor.gray
        }
        var rgb: UInt64 = 0
        Scanner(string: cleaned).scanHexInt64(&rgb)
        let red = CGFloat((rgb & 0xFF0000) >> 16) / 255.0
        let green = CGFloat((rgb & 0x00FF00) >> 8) / 255.0
        let blue = CGFloat(rgb & 0x0000FF) / 255.0
        return UIColor(red: red, green: green, blue: blue, alpha: CGFloat(1.0))
    }
}

Swift 5
extension String {
    /// Interprets the receiver as a hex color ("RGB", "RRGGBB", or "AARRGGBB",
    /// with or without "#") and returns the matching UIColor (white on scan failure,
    /// black on an unexpected digit count).
    var color: UIColor {
        let hex = trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        if #available(iOS 13, *) {
            // Fixed: `scanInt32(representation:)` returns Int32, so any 8-digit
            // ARGB value with an alpha byte >= 0x80 (e.g. "FF000000") overflows,
            // the scan returns nil, and the color silently came back white.
            // `scanUInt64(representation:)` (also iOS 13+) handles the full range.
            guard let int = Scanner(string: hex).scanUInt64(representation: .hexadecimal) else { return #colorLiteral(red: 1.0, green: 1.0, blue: 1.0, alpha: 1.0) }
            let a, r, g, b: UInt64
            switch hex.count {
            case 3: (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17) // RGB (12-bit)
            case 6: (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF) // RGB (24-bit)
            case 8: (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF) // ARGB (32-bit)
            default: (a, r, g, b) = (255, 0, 0, 0)
            }
            return UIColor(red: CGFloat(r) / 255.0, green: CGFloat(g) / 255.0, blue: CGFloat(b) / 255.0, alpha: CGFloat(a) / 255.0)
        } else {
            // Pre-iOS 13 fallback: scanHexInt32 is only deprecated from 13 on,
            // and its unsigned UInt32 result covers 8-digit ARGB correctly.
            var int = UInt32()
            Scanner(string: hex).scanHexInt32(&int)
            let a, r, g, b: UInt32
            switch hex.count {
            case 3: (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17) // RGB (12-bit)
            case 6: (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF) // RGB (24-bit)
            case 8: (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF) // ARGB (32-bit)
            default: (a, r, g, b) = (255, 0, 0, 0)
            }
            return UIColor(red: CGFloat(r) / 255.0, green: CGFloat(g) / 255.0, blue: CGFloat(b) / 255.0, alpha: CGFloat(a) / 255.0)
        }
    }
}

This combines a couple of the answers, providing both UInt64 and String representations. It also removes the "convenience" keyword which is unnecessary for Color because it is a struct (whereas it was needed for UIColor since it is a class). Lastly, it uses Double instead of CGFloat - again, necessary for Color.
extension Color {
    /// Builds an sRGB color from a 24-bit 0xRRGGBB integer value.
    init(_ hex: UInt64, opacity: Double = 1.0) {
        let red = Double((hex >> 16) & 0xff) / 255.0
        let green = Double((hex >> 8) & 0xff) / 255.0
        let blue = Double(hex & 0xff) / 255.0
        self.init(.sRGB, red: red, green: green, blue: blue, opacity: opacity)
    }

    /// Parses a 6-digit hex string (optionally "#"-prefixed) and delegates to
    /// the UInt64 initializer above. Asserts in debug builds on bad input.
    init(hex: String, opacity: Double = 1.0) {
        var cleaned = hex.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines).uppercased()
        if cleaned.hasPrefix("#") {
            cleaned.removeFirst()
        }
        assert(cleaned.count == 6, "Invalid hex code used.")
        var rgb: UInt64 = 0
        Scanner(string: cleaned).scanHexInt64(&rgb)
        self.init(rgb, opacity: opacity)
    }
}

Related

Swift 5.4 hex to NSColor

I am developing a program for macOS.
I need to convert a hex color to NSColor.
I looked at the proposed solutions here:
Convert Hex Color Code to NSColor
How to convert hex to NSColor?
But none of it works correctly with Xcode 12.5.1.
At the moment I did this, it works correctly:
extension NSObject {
    /// Builds an NSColor from 0–255 channel values.
    /// - Parameter alpha: opacity in 0...1; kept optional for source
    ///   compatibility, but a nil value now falls back to 1 instead of
    ///   crashing on the previous force-unwrap (`alpha!`).
    func RGB(r: CGFloat, g: CGFloat, b: CGFloat, alpha: CGFloat? = 1) -> NSColor {
        return NSColor(red: r/255, green: g/255, blue: b/255, alpha: alpha ?? 1)
    }
}
let fillColor = RGB(r: 33, g: 150, b: 243)
Possibly not having to use Cocoa.
I would like a function like this: hexToNSColor("#2196f3")
Can you give me a hand?
you could try something like this:
EDIT: included toHex(alpha:), from code I probably got from the net somewhere many years ago.
EDIT3,4: included the case for #RRGGBBAA
EDIT 5: stripping blank spaces in the hex string, to make NSColor (hex:" # 2196f380 ") work as well.
extension NSColor {
/// Creates a color from a "#RRGGBB" or "#RRGGBBAA" string (hash optional,
/// surrounding whitespace tolerated). Unparseable strings yield black.
convenience init(hex: String) {
let trimHex = hex.trimmingCharacters(in: .whitespacesAndNewlines)
// Trim again after dropping the first character so " # 2196f380 " also works.
let dropHash = String(trimHex.dropFirst()).trimmingCharacters(in: .whitespacesAndNewlines)
let hexString = trimHex.starts(with: "#") ? dropHash : trimHex
let ui64 = UInt64(hexString, radix: 16)
// Parse failure collapses to 0, i.e. opaque black below.
let value = ui64 != nil ? Int(ui64!) : 0
// #RRGGBB
var components = (
R: CGFloat((value >> 16) & 0xff) / 255,
G: CGFloat((value >> 08) & 0xff) / 255,
B: CGFloat((value >> 00) & 0xff) / 255,
a: CGFloat(1)
)
if String(hexString).count == 8 {
// #RRGGBBAA
components = (
R: CGFloat((value >> 24) & 0xff) / 255,
G: CGFloat((value >> 16) & 0xff) / 255,
B: CGFloat((value >> 08) & 0xff) / 255,
a: CGFloat((value >> 00) & 0xff) / 255
)
}
self.init(red: components.R, green: components.G, blue: components.B, alpha: components.a)
}
/// Serializes the color as "RRGGBB" (or "RRGGBBAA" when `alpha` is true).
/// Returns nil when the backing cgColor has fewer than 3 components
/// (e.g. grayscale color spaces).
func toHex(alpha: Bool = false) -> String? {
guard let components = cgColor.components, components.count >= 3 else {
return nil
}
let r = Float(components[0])
let g = Float(components[1])
let b = Float(components[2])
var a = Float(1.0)
// A missing 4th component means the color is fully opaque.
if components.count >= 4 {
a = Float(components[3])
}
if alpha {
return String(format: "%02lX%02lX%02lX%02lX", lroundf(r * 255), lroundf(g * 255), lroundf(b * 255), lroundf(a * 255))
} else {
return String(format: "%02lX%02lX%02lX", lroundf(r * 255), lroundf(g * 255), lroundf(b * 255))
}
}
}
let nscol = NSColor(hex: "#2196f3") // <-- with or without #
EDIT2:
you can do the same for UIColor, and for Color (with UIColor or NSColor):
extension Color {
    /// Bridges the String-based UIColor hex initializer into SwiftUI.
    public init(hex: String) {
        let bridged = UIColor(hex: hex)
        self.init(bridged)
    }

    /// Round-trips through UIColor to produce the hex representation.
    public func toHex(alpha: Bool = false) -> String? {
        return UIColor(self).toHex(alpha: alpha)
    }
}
/*
// With hash
let color: NSColor = NSColor(hexString: "#ff8942")
// Without hash, with alpha
let secondColor: NSColor = NSColor(hexString: "ff8942", alpha: 0.5)
// Short handling
let shortColorWithHex: NSColor = NSColor(hexString: "fff")
// From a real hex value (an `Int`)
// With hash
let color: NSColor = NSColor(hex: 0xff8942)
// Without hash, with alpha
let secondColor: NSColor = NSColor(hex: 0xff8942, alpha: 0.5)
*/
#if os(iOS) || os(tvOS) || os(watchOS)
import UIKit
typealias SWColor = UIColor
#else
import Cocoa
typealias SWColor = NSColor
#endif
private extension Int64 {
    /// Widens a 4-bit nibble 0xN to the 8-bit value 0xNN (e.g. 0xF -> 0xFF),
    /// so 3/4-digit hex colors scale onto the full 0...255 range.
    func duplicate4bits() -> Int64 {
        let shifted = self << 4
        return shifted + self
    }
}
/// An extension of UIColor (on iOS) or NSColor (on OSX) providing HEX color handling.
public extension SWColor {
/// 12-bit "#RGB": each nibble is doubled to a byte (0xF -> 0xFF) via duplicate4bits().
private convenience init?(hex3: Int64, alpha: Float) {
self.init(red: CGFloat( ((hex3 & 0xF00) >> 8).duplicate4bits() ) / 255.0,
green: CGFloat( ((hex3 & 0x0F0) >> 4).duplicate4bits() ) / 255.0,
blue: CGFloat( ((hex3 & 0x00F) >> 0).duplicate4bits() ) / 255.0,
alpha: CGFloat(alpha))
}
/// 16-bit "#RGBA": an explicit alpha argument overrides the literal's alpha nibble.
private convenience init?(hex4: Int64, alpha: Float?) {
self.init(red: CGFloat( ((hex4 & 0xF000) >> 12).duplicate4bits() ) / 255.0,
green: CGFloat( ((hex4 & 0x0F00) >> 8).duplicate4bits() ) / 255.0,
blue: CGFloat( ((hex4 & 0x00F0) >> 4).duplicate4bits() ) / 255.0,
alpha: alpha.map(CGFloat.init(_:)) ?? CGFloat( ((hex4 & 0x000F) >> 0).duplicate4bits() ) / 255.0)
}
/// 24-bit "#RRGGBB" with a caller-supplied alpha.
private convenience init?(hex6: Int64, alpha: Float) {
self.init(red: CGFloat( (hex6 & 0xFF0000) >> 16 ) / 255.0,
green: CGFloat( (hex6 & 0x00FF00) >> 8 ) / 255.0,
blue: CGFloat( (hex6 & 0x0000FF) >> 0 ) / 255.0, alpha: CGFloat(alpha))
}
/// 32-bit "#RRGGBBAA": an explicit alpha argument overrides the literal's alpha byte.
private convenience init?(hex8: Int64, alpha: Float?) {
self.init(red: CGFloat( (hex8 & 0xFF000000) >> 24 ) / 255.0,
green: CGFloat( (hex8 & 0x00FF0000) >> 16 ) / 255.0,
blue: CGFloat( (hex8 & 0x0000FF00) >> 8 ) / 255.0,
alpha: alpha.map(CGFloat.init(_:)) ?? CGFloat( (hex8 & 0x000000FF) >> 0 ) / 255.0)
}
/**
Create non-autoreleased color with in the given hex string and alpha.
- parameter hexString: The hex string, with or without the hash character.
- parameter alpha: The alpha value, a floating value between 0 and 1.
- returns: A color with the given hex string and alpha.
*/
convenience init?(hexString: String, alpha: Float? = nil) {
var hex = hexString
// Check for hash and remove the hash
if hex.hasPrefix("#") {
hex = String(hex[hex.index(after: hex.startIndex)...])
}
// Any non-hex character makes Int64 parsing fail, so the init returns nil.
guard let hexVal = Int64(hex, radix: 16) else {
self.init()
return nil
}
// Dispatch on digit count to the matching private initializer above.
switch hex.count {
case 3:
self.init(hex3: hexVal, alpha: alpha ?? 1.0)
case 4:
self.init(hex4: hexVal, alpha: alpha)
case 6:
self.init(hex6: hexVal, alpha: alpha ?? 1.0)
case 8:
self.init(hex8: hexVal, alpha: alpha)
default:
// Note:
// The swift 1.1 compiler is currently unable to destroy partially initialized classes in all cases,
// so it disallows formation of a situation where it would have to. We consider this a bug to be fixed
// in future releases, not a feature. -- Apple Forum
self.init()
return nil
}
}
/**
Create non-autoreleased color with in the given hex value and alpha
- parameter hex: The hex value. For example: 0xff8942 (no quotation).
- parameter alpha: The alpha value, a floating value between 0 and 1.
- returns: color with the given hex value and alpha
*/
convenience init?(hex: Int, alpha: Float = 1.0) {
// Only 24-bit values are accepted; anything larger fails as nil.
if (0x000000 ... 0xFFFFFF) ~= hex {
self.init(hex6: Int64(hex), alpha: alpha)
} else {
self.init()
return nil
}
}
/// Creates a color from a 32-bit 0xAARRGGBB integer (alpha in the top byte).
convenience init?(argbHex: Int) {
if (0x00000000 ... 0xFFFFFFFF) ~= argbHex {
let hex = Int64(argbHex)
self.init(red: CGFloat( (hex & 0x00FF0000) >> 16 ) / 255.0,
green: CGFloat( (hex & 0x0000FF00) >> 8 ) / 255.0,
blue: CGFloat( (hex & 0x000000FF) >> 0 ) / 255.0,
alpha: CGFloat( (hex & 0xFF000000) >> 24 ) / 255.0)
} else {
self.init()
return nil
}
}
/// Creates a color from an 8-digit "#AARRGGBB" string (hash optional).
convenience init?(argbHexString: String) {
var hex = argbHexString
// Check for hash and remove the hash
if hex.hasPrefix("#") {
hex = String(hex[hex.index(after: hex.startIndex)...])
}
// Exactly 8 hex digits required; otherwise fail as nil.
guard hex.count == 8, let hexVal = Int64(hex, radix: 16) else {
self.init()
return nil
}
self.init(red: CGFloat( (hexVal & 0x00FF0000) >> 16 ) / 255.0,
green: CGFloat( (hexVal & 0x0000FF00) >> 8 ) / 255.0,
blue: CGFloat( (hexVal & 0x000000FF) >> 0 ) / 255.0,
alpha: CGFloat( (hexVal & 0xFF000000) >> 24 ) / 255.0)
}
}

Hex codes/RGB values for SwiftUI colors?

is there a compiled list of hex codes or RGB values for SwiftUI colors? I'd like to know either for Color.purple but can't seem to find any good sources. Is there a way to programmatically determine the hex code or RGB value? Or maybe I can look at some properties of Color? Thanks in advance!
If you are coding in SwiftUI 2 you can convert your Color to UIColor and use getRed method to get the red, green, blue and alpha components. Once you have the components you can convert the values to hexa string:
extension Color {
    /// This SwiftUI color bridged to UIKit.
    var uiColor: UIColor { .init(self) }

    typealias RGBA = (red: CGFloat, green: CGFloat, blue: CGFloat, alpha: CGFloat)

    /// The RGBA components in 0...1, or nil when they cannot be read.
    var rgba: RGBA? {
        var red: CGFloat = 0
        var green: CGFloat = 0
        var blue: CGFloat = 0
        var alpha: CGFloat = 0
        guard uiColor.getRed(&red, green: &green, blue: &blue, alpha: &alpha) else {
            return nil
        }
        return (red, green, blue, alpha)
    }

    /// "#rrggbb" without alpha, or nil when components are unavailable.
    var hexaRGB: String? {
        guard let components = rgba else { return nil }
        return String(format: "#%02x%02x%02x",
                      Int(components.red * 255),
                      Int(components.green * 255),
                      Int(components.blue * 255))
    }

    /// "#rrggbbaa" including alpha, or nil when components are unavailable.
    var hexaRGBA: String? {
        guard let components = rgba else { return nil }
        return String(format: "#%02x%02x%02x%02x",
                      Int(components.red * 255),
                      Int(components.green * 255),
                      Int(components.blue * 255),
                      Int(components.alpha * 255))
    }
}
Color.purple.hexaRGB // "#af52de"
Color.purple.hexaRGBA // "#af52deff"
if let (red, green, blue, alpha) = Color.purple.rgba {
red // 0.686274528503418
green // 0.321568638086319
blue // 0.8705882430076599
alpha // 1
}
Isn't it good to use it like this?
import UIKit
extension UIColor {
    /// Creates a color from a 24-bit 0xRRGGBB integer.
    convenience init(hex: Int, alpha: CGFloat = 1.0) {
        let r = CGFloat((hex >> 16) & 0xFF) / 255.0
        let g = CGFloat((hex >> 8) & 0xFF) / 255.0
        let b = CGFloat(hex & 0xFF) / 255.0
        self.init(red: r, green: g, blue: b, alpha: alpha)
    }
}

How can I make a Swift enum with UIColor value?

I'm making a drawing app and I would like to refer to my colors through use of an enum. For example, it would be cleaner and more convenient to use Colors.RedColor instead of typing out values every time I want that red color. However, Swift's raw value enums don't seem to accept UIColor as a type. Is there a way to do this with an enum or something similar?
I do it like this (basically using a struct as a namespace):
extension UIColor {
    /// Theme colors namespaced under `UIColor.MyTheme`.
    struct MyTheme {
        /// Pure red.
        static var firstColor: UIColor {
            UIColor(red: 1, green: 0, blue: 0, alpha: 1)
        }
        /// Pure green.
        static var secondColor: UIColor {
            UIColor(red: 0, green: 1, blue: 0, alpha: 1)
        }
    }
}
And you use it like:
UIColor.MyTheme.firstColor
So you can have a red color inside your custom theme.
If your color isn't one of those defined by UIColor's convenience method, you can add an extension to UIColor:
extension UIColor {
    /// Pure red, exposed directly on UIColor.
    static var firstColor: UIColor {
        UIColor(red: 1, green: 0, blue: 0, alpha: 1)
    }
    /// Pure green, exposed directly on UIColor.
    static var secondColor: UIColor {
        UIColor(red: 0, green: 1, blue: 0, alpha: 1)
    }
}
// Usage
let myColor = UIColor.firstColor
I use computed properties to solve this problem, this is my code
/// Semantic color identifiers for the app; resolve to a UIColor via `.value`.
enum MyColor {
case navigationBarBackgroundColor
case navigationTintCololr // (sic: "Cololr" typo kept — renaming would break every call site)
}
extension MyColor {
    /// The concrete UIColor this case stands for.
    var value: UIColor {
        switch self {
        case .navigationBarBackgroundColor:
            return UIColor(red: 67/255, green: 173/255, blue: 247/255, alpha: 1.0)
        case .navigationTintCololr:
            return UIColor.white
        }
    }
}
then I can use MyColor like this:
MyColor.navigationBarBackgroundColor.value
How can I make a Swift enum with UIColor value?
This is how you would literally make an enum with a UIColor value:
import UIKit
final class Color: UIColor, RawRepresentable, ExpressibleByStringLiteral
{
    // MARK:- ExpressibleByStringLiteral
    typealias StringLiteralType = String

    /// Creates a color from a "#RGB", "#RRGGBB", or "#AARRGGBB" literal.
    /// Invalid strings assert in debug builds and fall back to clear black.
    convenience init(stringLiteral: String) {
        guard let (a,r,g,b) = Color.argb(hexColor: stringLiteral) else {
            assertionFailure("Invalid string")
            self.init(red: 0, green: 0, blue: 0, alpha: 0)
            return
        }
        self.init(red: r, green: g, blue: b, alpha: a)
    }

    // MARK:- RawRepresentable
    public typealias RawValue = String

    /// Failable counterpart of the string-literal initializer.
    convenience init?(rawValue: RawValue) {
        guard let (a,r,g,b) = Color.argb(hexColor: rawValue) else { return nil }
        self.init(red: r, green: g, blue: b, alpha: a)
    }

    var rawValue: RawValue {
        return hexString()
    }

    // MARK:- Private
    /// Return color components in range [0,1] for hexadecimal color strings.
    /// - hexColor: case-insensitive string with format RGB, RRGGBB, or AARRGGBB.
    private static func argb(hexColor: String) -> (CGFloat,CGFloat,CGFloat,CGFloat)?
    {
        let hexAlphabet = "0123456789abcdefABCDEF"
        let hex = hexColor.trimmingCharacters(in: CharacterSet(charactersIn: hexAlphabet).inverted)
        // Fixed: `scanHexInt32` was deprecated in iOS 13.0 — scan into a UInt64
        // with `scanHexInt64`; the shift/mask logic below is unchanged.
        var int: UInt64 = 0
        Scanner(string: hex).scanHexInt64(&int)
        let a, r, g, b: UInt64
        switch hex.count {
        case 3: (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17) // RGB
        case 6: (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF) // RRGGBB
        case 8: (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF) // AARRGGBB
        default: return nil
        }
        return (CGFloat(a)/255, CGFloat(r)/255, CGFloat(g)/255, CGFloat(b)/255)
    }

    /// Serializes the color as "#RRGGBBAA"; asserts and returns "#F00" when
    /// the components cannot be read from the current color space.
    private func hexString() -> String {
        var red: CGFloat = 0
        var green: CGFloat = 0
        var blue: CGFloat = 0
        var alpha: CGFloat = 0
        if self.getRed(&red, green: &green, blue: &blue, alpha: &alpha) {
            return String(format: "#%02X%02X%02X%02X", UInt8(red * 255), UInt8(green * 255), UInt8(blue * 255), UInt8(alpha * 255))
        }
        assertionFailure("Invalid colour space.")
        return "#F00"
    }
}
/// Named colors whose raw values are hex string literals, backed by the
/// UIColor-derived `Color` class above (which makes the raw type possible).
enum Colors: Color {
case red = "#F00"
// case blue = "#F00" // Raw value for enum case is not unique
}
let color3 = Color(rawValue: "#000") // RGB
let color6 = Color(rawValue: "#123456") // RRGGBB
let color8 = Color(rawValue: "#12345678") // AARRGGBB
print(Colors(rawValue:"#F00") as Any) // red
print(Colors(rawValue:"#FF0000") as Any) // red
print(Colors(rawValue:"#FFFF0000") as Any) // red
print(Colors(rawValue:"#ABC") as Any) // nil because it’s not a member of the enumeration
// print(Colors(rawValue:"#XYZ") as Any) // assertion on debug, black on release
print(Colors.red) // red
print(Colors.red.rawValue) // UIExtendedSRGBColorSpace 1 0 0 1
With help from
benhurott/UIColorExtension.md
Swift 3 UIColor utilities for random color and color from hex code
Raw type 'Bool' is not expressible by any literal
This can be done much more succinctly (and should):
extension UIColor {
    /// A mid-brightness green defined in the Display P3 color space.
    static let myColor = UIColor(displayP3Red: 0.0, green: 0.7, blue: 0.0, alpha: 1.0)
}
(Any other method that returns a UIColor is equally suitable, doesn't need to be displayP3Red)
Usage:
let someColor: UIColor = .myColor
Actually, I use the following implementation. It is convenient for me for two reasons: first, I can use a hex value, and second, all colors are kept as named constants.
import UIKit
/// Central palette of named color constants, grouped by hue.
struct ColorPalette {
struct Gray {
// Defined via the UIColor(netHex:) convenience initializer below.
static let Light = UIColor(netHex: 0x595959)
static let Medium = UIColor(netHex: 0x262626)
}
}
extension UIColor {
    /// Creates an opaque color from integer channel values in 0...255.
    convenience init(red: Int, green: Int, blue: Int) {
        assert(red >= 0 && red <= 255, "Invalid red component")
        assert(green >= 0 && green <= 255, "Invalid green component")
        assert(blue >= 0 && blue <= 255, "Invalid blue component")
        let scale = { (component: Int) -> CGFloat in CGFloat(component) / 255.0 }
        self.init(red: scale(red), green: scale(green), blue: scale(blue), alpha: 1.0)
    }

    /// Creates an opaque color from a 24-bit 0xRRGGBB value.
    convenience init(netHex: Int) {
        self.init(red: (netHex >> 16) & 0xff,
                  green: (netHex >> 8) & 0xff,
                  blue: netHex & 0xff)
    }
}
usage
let backgroundGreyColor = ColorPalette.Gray.Medium.cgColor
If you want to return multiple values, use the code below — it works well for me.
/// Driver workflow states, raw-valued by their display string.
/// Each state maps to a (UIColor, label) pair via `colorAndStatus`.
enum GetDriverStatus : String {
case ClockIn = "Clock In"
case TripStart = "Trip Start"
case BeaconTouchPlant = "Beacon Touch Plant"
case PickUp = "Pick Up"
case BeaconTouchSite = "Beacon Touch Site"
case BeaconLeftSite = "Beacon Left Site"
case DropOff = "Drop Off"
case BreakIn = "Break In"
case BreakOut = "Break Out"
case TripEnd = "Trip End"
case DayEnd = "Day End"
//case ClockOut = "Clock Out"
//Get data from ID
// Hand-maintained ordered list used for index-based lookup.
// NOTE(review): must be kept in sync with the cases above by hand —
// consider CaseIterable if the deployment Swift version allows it.
static var allValues: [GetDriverStatus] {
return [
.ClockIn,
.TripStart,
.BeaconTouchPlant,
.PickUp,
.BeaconTouchSite,
.BeaconLeftSite,
.DropOff,
.BreakIn,
.BreakOut,
.TripEnd,
.DayEnd
]
}
//Get Color
/// Tuple of (badge color, short status label) for this state.
var colorAndStatus: (UIColor,String) {
get {
switch self {
case .ClockIn,.TripStart: //Idle
return (UIColor(red: 248/255, green: 39/255, blue: 71/255, alpha: 1.0),"Idle") //dark pink-red
case .BeaconTouchPlant,.PickUp:
return (UIColor(red: 46/255, green: 180/255, blue: 42/255, alpha: 1.0),"Picking up") //Green
case .BeaconTouchSite:
return (UIColor(red: 252/255, green: 172/255, blue: 0/255, alpha: 1.0),"On site") //orange
case .DropOff,.BeaconLeftSite:
return (UIColor(red: 12/255, green: 90/255, blue: 255/255, alpha: 1.0),"Dropping off") //blue
case .BreakIn,.BreakOut:
return (UIColor(red: 151/255, green: 151/255, blue: 151/255, alpha: 1.0),"On break") //warm-grey-two
case .TripEnd:
return (UIColor.black,"Trip end")
case .DayEnd:
return (UIColor.black,"Done for the day")
}
}
}
}
How to use this code
Pass `.allValues[index]` to get the case for your option; its `colorAndStatus` tuple then holds the UIColor at position 0 and the status String at position 1.
GetDriverStatus.allValues[10].colorAndStatus.0 //UIColor.black ("Done for the day")
GetDriverStatus.allValues[2].colorAndStatus.1 //"Picking up"
Based on #Jano's answer I made an improvement by using Int as the literal type:
import UIKit
/// Marker subclass of UIColor so palette enums can use a color-derived raw
/// type; conformances are added in the extensions that follow.
public final class Colors: UIColor {
}
extension Colors: ExpressibleByIntegerLiteral {
    public typealias IntegerLiteralType = Int

    /// Interprets the literal as 0xRRGGBBAA (alpha in the lowest byte).
    public convenience init(integerLiteral value: Int) {
        // Fixed: the original masks (0xFF0000FF, 0x00FF00FF, 0x0000FFFF) let
        // neighbouring bytes leak into a channel — most visibly alpha, which
        // was computed as (value & 0x00FF00FF) / 0xFF and could exceed 1 by
        // orders of magnitude (e.g. 0x024F9Eff). Mask each byte exactly.
        let red = CGFloat((value & 0xFF000000) >> 24) / 0xFF
        let green = CGFloat((value & 0x00FF0000) >> 16) / 0xFF
        let blue = CGFloat((value & 0x0000FF00) >> 8) / 0xFF
        let alpha = CGFloat(value & 0x000000FF) / 0xFF
        self.init(red: red, green: green, blue: blue, alpha: alpha)
    }
}
extension Colors: RawRepresentable {
public typealias RawValue = Int
/// Packed integer form of the color, via the fileprivate `hex` property below.
/// NOTE(review): `hex` packs 0xAARRGGBB while `init(integerLiteral:)` decodes
/// 0xRRGGBBAA — round-tripping through rawValue looks inconsistent; confirm.
public var rawValue: RawValue {
return hex
}
/// Never actually fails; delegates to the integer-literal initializer.
public convenience init?(rawValue: RawValue) {
self.init(integerLiteral: rawValue)
}
}
fileprivate extension UIColor {
    /// Packs the color's components as 0xAARRGGBB; returns 0 (black) when the
    /// components cannot be read from the current color space.
    var hex: Int {
        var r: CGFloat = 0
        var g: CGFloat = 0
        var b: CGFloat = 0
        var a: CGFloat = 0
        guard self.getRed(&r, green: &g, blue: &b, alpha: &a) else {
            return 0x000000
        }
        let redByte = Int(r * 255.0)
        let greenByte = Int(g * 255.0)
        let blueByte = Int(b * 255.0)
        let alphaByte = Int(a * 255.0)
        return (alphaByte << 24) + (redByte << 16) + (greenByte << 8) + blueByte
    }
}
/// App-wide palette; raw values are 0xRRGGBBAA color literals.
public enum MainPalette: Colors {
case red = 0xFF0000ff
case white = 0xFFFFFFFF
}
/// Feature-specific palette; duplicate raw values are rejected by the compiler.
public enum FeatureXPalette: Colors {
case blue = 0x024F9Eff
// case bluish = 0x024F9Eff // <- Can't do
case red = 0xFF0000ff
}
The advantage is that it doesn't allow duplicate colors (as a true enum) and also I support alpha.
As you can see, you can create multiple enums for different palettes/schemes. In the case you want views to be able to use any palette, you can just add a protocol:
/// Abstraction over palette enums so any palette entry can yield its UIColor.
protocol Color {
var color: UIColor { get }
}
extension MainPalette: Color {
    /// The concrete UIColor behind this palette entry (its raw value).
    var color: UIColor { rawValue }
}
extension FeatureXPalette: Color {
    /// The concrete UIColor behind this palette entry (its raw value).
    var color: UIColor { rawValue }
}
so that way you can have a function that takes in the protocol:
/// Prints whether two palette entries resolve to the same UIColor.
func printColorEquality(color1: Color, color2: Color) {
    let matches = color1.color == color2.color
    print(matches)
}
let red1: Color = MainPalette.red
let red2: Color = FeatureXPalette.red
printColorEquality(color1: red1, color2: red2)
What I also like to do is add static vars for convenience:
extension MainPalette {
    /// Convenience accessor exposing the palette's red as a plain UIColor.
    public static var brightRed: UIColor { MainPalette.red.color }
}
that gives you a cleaner api:
view.backgroundColor = MainPalette.brightRed
Naming can be improved: you have to choose if you want a nice convenience api or nice naming for your enums.
This answer is probably late, but for others finding this question.
I was not satisfied with the answers above, since adding colors as UIColors extension is not always what you want, since:
It might not be the best solution from Software architecture perspective.
You can not use the power enums have, e.g. CaseIterable
This is the solution I came up with:
/// Pencil colors as a plain enum (keeps CaseIterable etc. available),
/// resolved to UIColor via `associatedColor`.
enum PencilColor {
case lightRed
case darkPurple
var associatedColor: UIColor {
switch self {
// NOTE(review): both cases currently return the identical RGB value
// (67, 173, 247 — a light blue), despite the case names saying red and
// purple. Looks like a copy/paste slip; confirm the intended values.
case .lightRed: return UIColor(red: 67/255, green: 173/255, blue: 247/255, alpha: 1.0)
case .darkPurple: return UIColor(red: 67/255, green: 173/255, blue: 247/255, alpha: 1.0)
}
}
}

Cannot convert value of type 'Int' to expected argument type 'ClassName'

// NOTE(review): this is the code from the question and it does NOT compile —
// a stored property's default value is evaluated before any instance exists,
// so it cannot call the instance method UIColorFromRGB. That mismatch is what
// produces the reported "Cannot convert value of type..." error. See the
// answers below for the `static` and the initializer-based fixes.
struct ColorPalette {
var DefaultBarTintColor : UIColor = UIColorFromRGB(0x75b5d4)
/// Converts a 24-bit 0xRRGGBB value into an opaque UIColor.
func UIColorFromRGB(rgbValue: UInt) -> UIColor {
return UIColor(
red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: CGFloat(1.0)
)
}
}
error : Cannot convert value of type "int' to expected argument type 'MyClassName'
If I do
view.backColor = UIColorFromRGB(0x75b5d4)
it works
But I want to do like this:
view.backColor = ColorPalette.DefaultBarTintColor
I don't know how to fix it
To call function inside struct, you need to use static keyword. Here is the modified function of your's!
/// Fix for the question above: marking both the property and the helper
/// `static` lets the property initializer call the helper without an instance.
struct ColorPalette {
static var DefaultBarTintColor : UIColor = ColorPalette.UIColorFromRGB(0x75b5d4)
/// Converts a 24-bit 0xRRGGBB value into an opaque UIColor.
static func UIColorFromRGB(rgbValue: UInt) -> UIColor {
return UIColor(
red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: CGFloat(1.0)
)
}
}
Now you can call like this!
view.backColor = ColorPalette.DefaultBarTintColor
Although, if you need to call without using the static keyword then you need to make the object of your struct. Like:
let myColorPallete = ColorPalette()
then you can access it like this:
viewNoEvents.backgroundColor = myColorPallete.DefaultBarTintColor
Hope this helps!
Instead of initializing property while declaration, you can have initializer function as below:
/// Alternative fix: defer the property assignment to a custom initializer,
/// where calling the instance method is legal.
struct ColorPalette {
var DefaultBarTintColor : UIColor!
/// Converts a 24-bit 0xRRGGBB value into an opaque UIColor.
func UIColorFromRGB(rgbValue: Int) -> UIColor {
return UIColor(
red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: CGFloat(1.0)
)
}
// All stored properties (the IUO defaults to nil) are initialized before
// the method call, so `self` is available here.
init(rgbValue: Int) {
self.DefaultBarTintColor = UIColorFromRGB(rgbValue)
}
}
And then create instance by passing hex value as below:
var color = ColorPalette(rgbValue: 0x75B5D4)

UIColor code in a variable in swift

I want to set the background color of view through a parameter so that when 1 want to change it I can change it. The value of the parameter is coming from database. My current code is like follows:
loadingView.backgroundColor = UIColor(red:0.99, green:0.75, blue:0.14, alpha:1.0)
I want to change the above code as follows:
loadingView.backgroundColor = UIColor(red:a, green:b, blue:c, alpha:d)
or
loadingView.backgroundColor = UIColor(hex)
Where a, b, c, d and hex are variables and all of these have a value.
If hex is an Int and not a String you can use this extension:
extension UIColor {
    /// Creates a color from a 24-bit 0xRRGGBB integer and an explicit alpha.
    convenience init(hex: Int, alpha: CGFloat) {
        self.init(red: CGFloat((hex >> 16) & 0xFF) / 255.0,
                  green: CGFloat((hex >> 8) & 0xFF) / 255.0,
                  blue: CGFloat(hex & 0xFF) / 255.0,
                  alpha: alpha)
    }
}
Used like this:
UIColor(0x00ff00, alpha: 1.0)
UIColor(hex, alpha: 1.0)
extension String {
    /// Integer-offset substring access, e.g. `"abcdef"[0..<2] == "ab"`.
    /// Rewritten for Swift 3+: `substringWithRange(_:)` and the free function
    /// `advance(_:_:)` were removed from the language long ago, so the
    /// original body no longer compiles.
    subscript(range: Range<Int>) -> String {
        let lower = index(startIndex, offsetBy: range.lowerBound)
        let upper = index(startIndex, offsetBy: range.upperBound)
        return String(self[lower..<upper])
    }

    /// Closed-range overload so call sites like `s[0...1]` (used by the
    /// UIColor(htmlColor:) initializer below) keep working: in modern Swift
    /// `0...1` is a ClosedRange, which no longer matches Range<Int>.
    subscript(range: ClosedRange<Int>) -> String {
        let lower = index(startIndex, offsetBy: range.lowerBound)
        let upper = index(startIndex, offsetBy: range.upperBound)
        return String(self[lower...upper])
    }
}
extension UIColor {
/// Creates a color from a 6-digit hex string like "00ff00".
/// NOTE(review): assumes `htmlColor` holds at least 6 characters with no
/// leading "#" — the Int-range String subscript above offsets blindly and
/// would trap on shorter input; confirm callers guarantee this.
convenience init(htmlColor:String, alpha: Double) {
// Each two-character slice is parsed as a base-16 byte via strtoul.
self.init(red: CGFloat( strtoul(htmlColor[0...1], nil, 16) )/255.0,
green: CGFloat( strtoul(htmlColor[2...3], nil, 16) )/255.0,
blue: CGFloat( strtoul(htmlColor[4...5], nil, 16) )/255.0,
alpha: CGFloat( alpha )
)
}
}
Testing
let greenColorHEX = "00ff00"
let greenColor = UIColor(htmlColor: greenColorHEX, alpha: 1)
let a:CGFloat = 1
let b:CGFloat = 1
let c:CGFloat = 0
let d:CGFloat = 1
let yellowColor = UIColor(red: a, green: b, blue: c, alpha: d)