How to get RGB components from Color in SwiftUI

If I have a SwiftUI Color:
let col: Color = Color(red: 0.5, green: 0.5, blue: 0.5)
How do I get the RGB components from col?
Like this, maybe:
print(col.components.red)
In UIKit I could use UIColor.getRed, but there doesn't seem to be an equivalent in SwiftUI.

iOS 14 / macOS 11
iOS 14 and macOS 11 (reported as 10.16 by early Big Sur SDKs) added initializers that create a UIColor or an NSColor from a SwiftUI Color. With their help you can implement the following extension:
iOS / macOS
import SwiftUI
#if canImport(UIKit)
import UIKit
#elseif canImport(AppKit)
import AppKit
#endif

extension Color {
    var components: (red: CGFloat, green: CGFloat, blue: CGFloat, opacity: CGFloat) {
        #if canImport(UIKit)
        typealias NativeColor = UIColor
        #elseif canImport(AppKit)
        typealias NativeColor = NSColor
        #endif

        var r: CGFloat = 0
        var g: CGFloat = 0
        var b: CGFloat = 0
        var o: CGFloat = 0

        #if canImport(UIKit)
        guard NativeColor(self).getRed(&r, green: &g, blue: &b, alpha: &o) else {
            // You can handle the failure here as you want
            return (0, 0, 0, 0)
        }
        #elseif canImport(AppKit)
        // NSColor.getRed returns Void and raises if the color space is not
        // RGB-compatible, so convert to an RGB color space first.
        guard let converted = NativeColor(self).usingColorSpace(.deviceRGB) else {
            return (0, 0, 0, 0)
        }
        converted.getRed(&r, green: &g, blue: &b, alpha: &o)
        #endif

        return (r, g, b, o)
    }
}
Usage
Color.red.components.red // 0.9999999403953552 // <- SwiftUI Colors are not pure!
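For completeness, the bridging initializers can also be called directly, without the extension. A minimal sketch (iOS names assumed; use NSColor(_:) on macOS):
import SwiftUI
import UIKit

let swiftUIColor = Color(red: 0.5, green: 0.5, blue: 0.5)
let bridged = UIColor(swiftUIColor) // UIColor(_: Color) is new in iOS 14
var r: CGFloat = 0, g: CGFloat = 0, b: CGFloat = 0, a: CGFloat = 0
_ = bridged.getRed(&r, green: &g, blue: &b, alpha: &a)
print(r, g, b, a) // ≈ 0.5 0.5 0.5 1.0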

While waiting for a proper API, I've abused the CustomStringConvertible protocol for the simple RGBA case, where the color's description has the format #rrggbbaa:
debugPrint(Color.red)
debugPrint(Color(red: 1.0, green: 0.0, blue: 0.0))
debugPrint(Color(red: 1.0, green: 0.3, blue: 0.0))
debugPrint(Color(.sRGB, red: 1.0, green: 0.0, blue: 0.5, opacity: 0.3))
debugPrint(Color(hue: 1.0, saturation: 0.0, brightness: 1.0))
debugPrint(Color(.displayP3, red: 1.0, green: 0.0, blue: 0.0, opacity: 1.0).description)
red
#FF0000FF
#FF4C00FF
#FF00804D
#FFFFFFFF
"DisplayP3(red: 1.0, green: 0.0, blue: 0.0, opacity: 1.0)"
As you can see, things like Color.red just dump "red", but if you are working with simple RGB colors generated by code (i.e. from a color picker), then this is not too bad:
extension SwiftUI.Color {
    var redComponent: Double? {
        let val = description
        guard val.hasPrefix("#") else { return nil }
        let r1 = val.index(val.startIndex, offsetBy: 1)
        let r2 = val.index(val.startIndex, offsetBy: 2)
        return Double(Int(val[r1...r2], radix: 16)!) / 255.0
    }

    var greenComponent: Double? {
        let val = description
        guard val.hasPrefix("#") else { return nil }
        let g1 = val.index(val.startIndex, offsetBy: 3)
        let g2 = val.index(val.startIndex, offsetBy: 4)
        return Double(Int(val[g1...g2], radix: 16)!) / 255.0
    }

    var blueComponent: Double? {
        let val = description
        guard val.hasPrefix("#") else { return nil }
        let b1 = val.index(val.startIndex, offsetBy: 5)
        let b2 = val.index(val.startIndex, offsetBy: 6)
        return Double(Int(val[b1...b2], radix: 16)!) / 255.0
    }

    var opacityComponent: Double? {
        let val = description
        guard val.hasPrefix("#") else { return nil }
        let o1 = val.index(val.startIndex, offsetBy: 7)
        let o2 = val.index(val.startIndex, offsetBy: 8)
        return Double(Int(val[o1...o2], radix: 16)!) / 255.0
    }
}
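Usage of the description-based accessors, with the values from the debugPrint session above:
Color(red: 1.0, green: 0.3, blue: 0.0).redComponent   // 1.0
Color(red: 1.0, green: 0.3, blue: 0.0).greenComponent // ≈ 0.298 (0x4C / 255)
Color.red.redComponent // nil, because the description is "red", not "#…"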

Simple one-liner:
print(UIColor(Color.blue).cgColor.components)
You get a [CGFloat]? containing [red, green, blue, alpha].
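If you prefer not to force-handle that optional, a small hedged sketch:
import SwiftUI
import UIKit

if let comps = UIColor(Color.blue).cgColor.components, comps.count >= 4 {
    let (r, g, b, a) = (comps[0], comps[1], comps[2], comps[3])
    print(r, g, b, a)
}
// Note: grayscale colors can yield only two components (white, alpha),
// hence the count check.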

I have found that @Mojtaba Hosseini's answer works fine, except when your colors are declared inside an asset catalog with light and dark appearances.
Then I found that the dark appearance somehow gets lost when using UIColor(self). Here is a workaround I came up with.
Note: this is iOS-only since my app is iOS-only; you could of course do the same as @Mojtaba Hosseini and adapt it to macOS as well.
extension Color {
    var components: (r: Double, g: Double, b: Double, o: Double)? {
        let uiColor: UIColor
        var r: CGFloat = 0
        var g: CGFloat = 0
        var b: CGFloat = 0
        var o: CGFloat = 0

        if self.description.contains("NamedColor") {
            // Pull the asset name out of the description, which looks like
            // NamedColor(name: "someName", bundle: ...)
            let lowerBound = self.description.range(of: "name: \"")!.upperBound
            let upperBound = self.description.range(of: "\", bundle")!.lowerBound
            let assetsName = String(self.description[lowerBound..<upperBound])
            uiColor = UIColor(named: assetsName)!
        } else {
            uiColor = UIColor(self)
        }

        guard uiColor.getRed(&r, green: &g, blue: &b, alpha: &o) else { return nil }
        return (Double(r), Double(g), Double(b), Double(o))
    }
}
The idea is to use the UIColor(named:) initializer instead, which resolves all appearances correctly.
Fortunately, the name we set in the asset catalog is saved in the Color's description. We only have to extract it, since the description also contains other information (bundle, etc.).
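Once you have the named UIColor, you can also resolve a specific appearance explicitly. A sketch ("AccentBackground" is a hypothetical asset name):
let base = UIColor(named: "AccentBackground")! // hypothetical asset
let dark = base.resolvedColor(with: UITraitCollection(userInterfaceStyle: .dark)) // iOS 13+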

Based on @Mojtaba's answer, I came up with a shorter, more flexible version:
#if canImport(UIKit)
import UIKit
#elseif canImport(AppKit)
import AppKit
#endif

extension Color {
    #if canImport(UIKit)
    var asNative: UIColor { UIColor(self) }
    #elseif canImport(AppKit)
    var asNative: NSColor { NSColor(self) }
    #endif

    var rgba: (red: CGFloat, green: CGFloat, blue: CGFloat, alpha: CGFloat) {
        // usingColorSpace exists on NSColor only; UIColor needs no conversion
        #if canImport(UIKit)
        let color = asNative
        #elseif canImport(AppKit)
        let color = asNative.usingColorSpace(.deviceRGB)!
        #endif
        var t = (CGFloat(), CGFloat(), CGFloat(), CGFloat())
        color.getRed(&t.0, green: &t.1, blue: &t.2, alpha: &t.3)
        return t
    }

    var hsva: (hue: CGFloat, saturation: CGFloat, value: CGFloat, alpha: CGFloat) {
        #if canImport(UIKit)
        let color = asNative
        #elseif canImport(AppKit)
        let color = asNative.usingColorSpace(.deviceRGB)!
        #endif
        var t = (CGFloat(), CGFloat(), CGFloat(), CGFloat())
        color.getHue(&t.0, saturation: &t.1, brightness: &t.2, alpha: &t.3)
        return t
    }
}
Doing asNative.redComponent etc. might also work, FYI.
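For instance, on macOS, NSColor exposes the components directly once the color is in an RGB color space (a sketch, macOS only):
let red = Color.orange.asNative.usingColorSpace(.deviceRGB)?.redComponent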

The answer is no: there's no API to do so (yet), but...
Most SwiftUI structs have fields that are private, as in Color.
You can use Mirror to extract such information, but keep in mind it is not efficient.
Here's how to extract the hexadecimal representation of a SwiftUI Color, for educational purposes.
Copy and paste this into an Xcode 11 playground.
import UIKit
import SwiftUI

let systemColor = Color.red
let color = Color(red: 0.3, green: 0.5, blue: 1)

extension Color {
    var hexRepresentation: String? {
        // Reflect self (not a captured constant) so every Color works.
        let children = Mirror(reflecting: self).children
        let _provider = children.filter { $0.label == "provider" }.first
        guard let provider = _provider?.value else {
            return nil
        }
        let providerChildren = Mirror(reflecting: provider).children
        let _base = providerChildren.filter { $0.label == "base" }.first
        guard let base = _base?.value else {
            return nil
        }
        var baseValue: String = ""
        dump(base, to: &baseValue)
        guard let firstLine = baseValue.split(separator: "\n").first,
              let hexString = firstLine.split(separator: " ")[1] as Substring? else {
            return nil
        }
        return hexString.trimmingCharacters(in: .newlines)
    }
}

systemColor.hexRepresentation
color.hexRepresentation
systemColor.hexRepresentation
color.hexRepresentation
Colors like .red, .white, etc. don't seem to carry much information when dumped;
just their "system" name.
▿ red
▿ provider: SwiftUI.(unknown context at $1297483bc).ColorBox<SwiftUI.SystemColorType> #0
- super: SwiftUI.(unknown context at $129748300).AnyColorBox
- base: SwiftUI.SystemColorType.red
A Color instantiated with red/blue/green components does instead.
▿ #4C80FFFF
▿ provider: SwiftUI.(unknown context at $11cd2e3bc).ColorBox<SwiftUI.Color._Resolved> #0
- super: SwiftUI.(unknown context at $11cd2e300).AnyColorBox
▿ base: #4C80FFFF
- linearRed: 0.073238954
- linearGreen: 0.21404114
- linearBlue: 1.0
- opacity: 1.0
In the Playground, you will see:
systemColor.hexRepresentation returning nil
color.hexRepresentation returning "#4C80FFFF"

You can use UIColor and transform the UIColor to a Color afterwards.
Code:
extension UIColor {
    func hexValue() -> String {
        guard let values = cgColor.components else { return "#000000FF" }
        var outputR: Int = 0
        var outputG: Int = 0
        var outputB: Int = 0
        var outputA: Int = 255 // fully opaque by default (0xFF, not 1)
        switch values.count {
        case 1: // grayscale
            outputR = Int(values[0] * 255)
            outputG = Int(values[0] * 255)
            outputB = Int(values[0] * 255)
        case 2: // grayscale + alpha
            outputR = Int(values[0] * 255)
            outputG = Int(values[0] * 255)
            outputB = Int(values[0] * 255)
            outputA = Int(values[1] * 255)
        case 3: // RGB
            outputR = Int(values[0] * 255)
            outputG = Int(values[1] * 255)
            outputB = Int(values[2] * 255)
        case 4: // RGBA
            outputR = Int(values[0] * 255)
            outputG = Int(values[1] * 255)
            outputB = Int(values[2] * 255)
            outputA = Int(values[3] * 255)
        default:
            break
        }
        return String(format: "#%02X%02X%02X%02X", outputR, outputG, outputB, outputA)
    }
}
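A usage sketch; the exact digits depend on how the Color to UIColor conversion rounds (note that Int(_:) truncates, so 0.5 maps to 0x7F rather than 0x80):
UIColor(Color(red: 0.3, green: 0.5, blue: 1)).hexValue() // "#4C7FFFFF" (approximately)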

Related

How to convert a Color Literal to cgColor in Swift?

var CodeBackground = #colorLiteral(red: 0.1294117647, green: 0.1294117647, blue: 0.1960784314, alpha: 1)
cells?.layer.borderColor = // ... how can I set this color literal as a cgColor?
I know how to convert a UIColor to cgColor in Swift, for example:
UIColor.black.cgColor
But what about converting a Color Literal to cgColor?
Thank you.
As you already know the simpler way of using a colorLiteral as a cgColor, I'll jump to the other way of doing it...
For that you need a custom function that extracts the color values (red, green, blue) from the colorLiteral, as below:
extension UIColor {
    // It's most convenient to keep this function in a UIColor extension.
    func rgb() -> (red: Int, green: Int, blue: Int, alpha: Int)? {
        var fRed: CGFloat = 0
        var fGreen: CGFloat = 0
        var fBlue: CGFloat = 0
        var fAlpha: CGFloat = 0
        if self.getRed(&fRed, green: &fGreen, blue: &fBlue, alpha: &fAlpha) {
            let iRed = Int(fRed * 255.0)
            let iGreen = Int(fGreen * 255.0)
            let iBlue = Int(fBlue * 255.0)
            let iAlpha = Int(fAlpha * 255.0)
            return (red: iRed, green: iGreen, blue: iBlue, alpha: iAlpha)
        } else {
            // Could not extract RGBA components
            return nil
        }
    }
}
Now, after creating this function, you can convert a color literal into a cgColor as below. Note that the integer components have to be scaled back to the 0...1 range that CGColor expects:
let CodeBackground = #colorLiteral(red: 0.1294117647, green: 0.1294117647, blue: 0.1960784314, alpha: 1)
let rgblit = CodeBackground.rgb()!
let convertedCGColor = CGColor(srgbRed: CGFloat(rgblit.red) / 255.0,
                               green: CGFloat(rgblit.green) / 255.0,
                               blue: CGFloat(rgblit.blue) / 255.0,
                               alpha: CGFloat(rgblit.alpha) / 255.0)
You can directly use convertedCGColor like:
cells?.layer.borderColor = convertedCGColor
Hope it helps.
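For reference, here is the simpler way alluded to above: a color literal already produces a UIColor, so you can take its cgColor directly (same cells? optional as in the question):
let literal: UIColor = #colorLiteral(red: 0.1294117647, green: 0.1294117647, blue: 0.1960784314, alpha: 1)
cells?.layer.borderColor = literal.cgColor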

How to keep dark gray colors from becoming transparent when removing a background from an image

I'm having an issue where, when I try to remove the green from an image (in this case the image background), all the dark grays within the part of the image I want to keep become semi-transparent. I am unsure why and would like some advice:
func chromaKeyFilter(fromHue: CGFloat, toHue: CGFloat) -> CIFilter? {
    let size = 64
    var cubeRGB = [Float]()
    for z in 0 ..< size {
        let blue = CGFloat(z) / CGFloat(size - 1)
        for y in 0 ..< size {
            let green = CGFloat(y) / CGFloat(size - 1)
            for x in 0 ..< size {
                let red = CGFloat(x) / CGFloat(size - 1)
                let color = UIColor(red: red, green: green, blue: blue, alpha: 1)
                let hueColor = color.hsbColor
                let alpha: CGFloat = (hueColor.hue >= fromHue && hueColor.hue <= toHue) ? 0 : 1
                cubeRGB.append(Float(red * alpha))
                cubeRGB.append(Float(green * alpha))
                cubeRGB.append(Float(blue * alpha))
                cubeRGB.append(Float(alpha))
            }
        }
    }
    let data = Data(bytes: cubeRGB, count: cubeRGB.count * MemoryLayout<Float>.size)
    let params: [String: Any] = ["inputCubeDimension": size, "inputCubeData": data]
    return CIFilter(name: "CIColorCube", parameters: params)
}

func filterPixels(foregroundCIImage: CIImage) -> CIImage {
    let chromaCIFilter = self.chromaKeyFilter(fromHue: 0.33, toHue: 0.34)
    chromaCIFilter?.setValue(foregroundCIImage, forKey: kCIInputImageKey)
    let sourceCIImageWithoutBackground = chromaCIFilter?.outputImage
    var image = CIImage()
    if let filteredImage = sourceCIImageWithoutBackground {
        image = filteredImage
    }
    return image
}
extension UIColor {
    /// Decomposes UIColor to its HSBA components
    var hsbColor: HSBColor {
        var h: CGFloat = 0, s: CGFloat = 0, b: CGFloat = 0, a: CGFloat = 0
        self.getHue(&h, saturation: &s, brightness: &b, alpha: &a)
        return HSBColor(hue: h, saturation: s, brightness: b, alpha: a)
    }

    /// Holds the CGFloat values of HSBA components of a color
    public struct HSBColor {
        var hue: CGFloat
        var saturation: CGFloat
        var brightness: CGFloat
        var alpha: CGFloat
    }
}
Sample image:
Your code is correct, but remember that a dark gray could really be a very dark green.
On this line:
let alpha: CGFloat = (hueColor.hue >= fromHue && hueColor.hue <= toHue) ? 0 : 1
I would take brightness and saturation into account. For example:
let alpha: CGFloat = (hueColor.saturation > 0.1 && hueColor.hue >= fromHue && hueColor.hue <= toHue) ? 0 : 1
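A variant that also protects very dark pixels, reusing the hsbColor helper from the question (the 0.1 thresholds are assumptions to tune):
let isInHueRange = hueColor.hue >= fromHue && hueColor.hue <= toHue
let isVivid = hueColor.saturation > 0.1 && hueColor.brightness > 0.1
let alpha: CGFloat = (isInHueRange && isVivid) ? 0 : 1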

Convert UIColor initialization to Color Literal in Xcode

I have multiple different UIColor objects. Some of them are initialized by a constructor; some of them are shown as color literals.
static let optionsHeader = UIColor(...) // Xcode renders a color swatch (color literal) here
static let optionButtonSelected = UIColor(red: 0.865, green: 0.804, blue: 0.0, alpha: 1.0)
How can I convert the UIColor.init(...) statements to color literals?
An RGB color literal takes the same parameters as the UIColor initializer:
#colorLiteral(red: 1, green: 1, blue: 1, alpha: 1)
Or you can pick the color visually after typing #colorLiteral().
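Applied to the initializer from the question, the conversion is a parameter-for-parameter swap:
static let optionButtonSelected = #colorLiteral(red: 0.865, green: 0.804, blue: 0.0, alpha: 1.0)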
You can also use an extension to specify colors by hex string instead of typing RGBA values:
extension UIColor {
    convenience init(hexString: String, alpha: CGFloat = 1.0) {
        let hexString: String = hexString.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines)
        let scanner = Scanner(string: hexString)
        if hexString.hasPrefix("#") {
            scanner.scanLocation = 1 // deprecated in iOS 13; use scanner.currentIndex in new code
        }
        var color: UInt32 = 0
        scanner.scanHexInt32(&color) // deprecated in iOS 13; use scanHexInt64 in new code
        let mask = 0x000000FF
        let r = Int(color >> 16) & mask
        let g = Int(color >> 8) & mask
        let b = Int(color) & mask
        let red = CGFloat(r) / 255.0
        let green = CGFloat(g) / 255.0
        let blue = CGFloat(b) / 255.0
        self.init(red: red, green: green, blue: blue, alpha: alpha)
    }

    func toHexString() -> String {
        var r: CGFloat = 0
        var g: CGFloat = 0
        var b: CGFloat = 0
        var a: CGFloat = 0
        getRed(&r, green: &g, blue: &b, alpha: &a)
        let rgb: Int = (Int)(r * 255) << 16 | (Int)(g * 255) << 8 | (Int)(b * 255) << 0
        return String(format: "#%06x", rgb)
    }
}
Then you can code it with:
self.backgroundColor = UIColor(hexString: "#4A4A4A")

How can I make a Swift enum with UIColor value?

I'm making a drawing app and I would like to refer to my colors through use of an enum. For example, it would be cleaner and more convenient to use Colors.RedColor instead of typing out values every time I want that red color. However, Swift's raw value enums don't seem to accept UIColor as a type. Is there a way to do this with an enum or something similar?
I do it like this (basically using a struct as a namespace):
extension UIColor {
    struct MyTheme {
        static var firstColor: UIColor { return UIColor(red: 1, green: 0, blue: 0, alpha: 1) }
        static var secondColor: UIColor { return UIColor(red: 0, green: 1, blue: 0, alpha: 1) }
    }
}
And you use it like:
UIColor.MyTheme.firstColor
So you can have a red color inside your custom theme.
If your color isn't one of those predefined on UIColor, you can add an extension to UIColor:
extension UIColor {
    static var firstColor: UIColor { return UIColor(red: 1, green: 0, blue: 0, alpha: 1) }
    static var secondColor: UIColor { return UIColor(red: 0, green: 1, blue: 0, alpha: 1) }
}

// Usage
let myColor = UIColor.firstColor
I use computed properties to solve this problem. This is my code:
enum MyColor {
    case navigationBarBackgroundColor
    case navigationTintColor
}

extension MyColor {
    var value: UIColor {
        switch self {
        case .navigationBarBackgroundColor:
            return UIColor(red: 67/255, green: 173/255, blue: 247/255, alpha: 1.0)
        case .navigationTintColor:
            return UIColor.white
        }
    }
}
Then I can use MyColor like this:
MyColor.navigationBarBackgroundColor.value
How can I make a Swift enum with UIColor value?
This is how you would literally make an enum with a UIColor value:
import UIKit

final class Color: UIColor, RawRepresentable, ExpressibleByStringLiteral {
    // MARK: - ExpressibleByStringLiteral
    typealias StringLiteralType = String

    convenience init(stringLiteral: String) {
        guard let (a, r, g, b) = Color.argb(hexColor: stringLiteral) else {
            assertionFailure("Invalid string")
            self.init(red: 0, green: 0, blue: 0, alpha: 0)
            return
        }
        self.init(red: r, green: g, blue: b, alpha: a)
    }

    // MARK: - RawRepresentable
    public typealias RawValue = String

    convenience init?(rawValue: RawValue) {
        guard let (a, r, g, b) = Color.argb(hexColor: rawValue) else { return nil }
        self.init(red: r, green: g, blue: b, alpha: a)
    }

    var rawValue: RawValue {
        return hexString()
    }

    // MARK: - Private

    /// Return color components in range [0,1] for hexadecimal color strings.
    /// - hexColor: case-insensitive string with format RGB, RRGGBB, or AARRGGBB.
    private static func argb(hexColor: String) -> (CGFloat, CGFloat, CGFloat, CGFloat)? {
        let hexAlphabet = "0123456789abcdefABCDEF"
        let hex = hexColor.trimmingCharacters(in: CharacterSet(charactersIn: hexAlphabet).inverted)
        var int = UInt32()
        Scanner(string: hex).scanHexInt32(&int) // deprecated in iOS 13; use scanHexInt64 in new code
        let a, r, g, b: UInt32
        switch hex.count {
        case 3: (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17) // RGB
        case 6: (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)                   // RRGGBB
        case 8: (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)      // AARRGGBB
        default: return nil
        }
        return (CGFloat(a)/255, CGFloat(r)/255, CGFloat(g)/255, CGFloat(b)/255)
    }

    private func hexString() -> String {
        var red: CGFloat = 0
        var green: CGFloat = 0
        var blue: CGFloat = 0
        var alpha: CGFloat = 0
        if self.getRed(&red, green: &green, blue: &blue, alpha: &alpha) {
            return String(format: "#%02X%02X%02X%02X", UInt8(red * 255), UInt8(green * 255), UInt8(blue * 255), UInt8(alpha * 255))
        }
        assertionFailure("Invalid colour space.")
        return "#F00"
    }
}
enum Colors: Color {
    case red = "#F00"
    // case blue = "#F00" // Raw value for enum case is not unique
}

let color3 = Color(rawValue: "#000")      // RGB
let color6 = Color(rawValue: "#123456")   // RRGGBB
let color8 = Color(rawValue: "#12345678") // AARRGGBB

print(Colors(rawValue: "#F00") as Any)      // red
print(Colors(rawValue: "#FF0000") as Any)   // red
print(Colors(rawValue: "#FFFF0000") as Any) // red
print(Colors(rawValue: "#ABC") as Any)      // nil because it's not a member of the enumeration
// print(Colors(rawValue: "#XYZ") as Any)   // assertion on debug, black on release
print(Colors.red)          // red
print(Colors.red.rawValue) // UIExtendedSRGBColorSpace 1 0 0 1
With help from:
benhurott/UIColorExtension.md
Swift 3 UIColor utilities for random color and color from hex code
Raw type 'Bool' is not expressible by any literal
This can be done much more succinctly (and should):
extension UIColor {
    static let myColor = UIColor(displayP3Red: 0.0, green: 0.7, blue: 0.0, alpha: 1.0)
}
(Any other initializer that returns a UIColor is equally suitable; it doesn't need to be displayP3Red.)
Usage:
let someColor: UIColor = .myColor
Actually, I use the implementation below. It is very convenient for me for two reasons: first, I can use hex values, and second, all colors are constants.
import UIKit

struct ColorPalette {
    struct Gray {
        static let Light = UIColor(netHex: 0x595959)
        static let Medium = UIColor(netHex: 0x262626)
    }
}

extension UIColor {
    convenience init(red: Int, green: Int, blue: Int) {
        assert(red >= 0 && red <= 255, "Invalid red component")
        assert(green >= 0 && green <= 255, "Invalid green component")
        assert(blue >= 0 && blue <= 255, "Invalid blue component")
        self.init(red: CGFloat(red) / 255.0, green: CGFloat(green) / 255.0, blue: CGFloat(blue) / 255.0, alpha: 1.0)
    }

    convenience init(netHex: Int) {
        self.init(red: (netHex >> 16) & 0xff, green: (netHex >> 8) & 0xff, blue: netHex & 0xff)
    }
}
Usage:
let backgroundGreyColor = ColorPalette.Gray.Medium.cgColor
If you want to return multiple values, then use the code below; it works well for me:
enum GetDriverStatus: String {
    case ClockIn = "Clock In"
    case TripStart = "Trip Start"
    case BeaconTouchPlant = "Beacon Touch Plant"
    case PickUp = "Pick Up"
    case BeaconTouchSite = "Beacon Touch Site"
    case BeaconLeftSite = "Beacon Left Site"
    case DropOff = "Drop Off"
    case BreakIn = "Break In"
    case BreakOut = "Break Out"
    case TripEnd = "Trip End"
    case DayEnd = "Day End"
    //case ClockOut = "Clock Out"

    // All cases, in order
    static var allValues: [GetDriverStatus] {
        return [
            .ClockIn,
            .TripStart,
            .BeaconTouchPlant,
            .PickUp,
            .BeaconTouchSite,
            .BeaconLeftSite,
            .DropOff,
            .BreakIn,
            .BreakOut,
            .TripEnd,
            .DayEnd
        ]
    }

    // Color and status text for each case
    var colorAndStatus: (UIColor, String) {
        switch self {
        case .ClockIn, .TripStart:
            return (UIColor(red: 248/255, green: 39/255, blue: 71/255, alpha: 1.0), "Idle") // dark pink-red
        case .BeaconTouchPlant, .PickUp:
            return (UIColor(red: 46/255, green: 180/255, blue: 42/255, alpha: 1.0), "Picking up") // green
        case .BeaconTouchSite:
            return (UIColor(red: 252/255, green: 172/255, blue: 0/255, alpha: 1.0), "On site") // orange
        case .DropOff, .BeaconLeftSite:
            return (UIColor(red: 12/255, green: 90/255, blue: 255/255, alpha: 1.0), "Dropping off") // blue
        case .BreakIn, .BreakOut:
            return (UIColor(red: 151/255, green: 151/255, blue: 151/255, alpha: 1.0), "On break") // warm-grey-two
        case .TripEnd:
            return (UIColor.black, "Trip end")
        case .DayEnd:
            return (UIColor.black, "Done for the day")
        }
    }
}
How to use this code: passing .allValues[index] you get the UIColor at tuple position 0 and the status String at position 1.
GetDriverStatus.allValues[1].colorAndStatus.0 // dark pink-red color for "Idle"
GetDriverStatus.allValues[2].colorAndStatus.1 // "Picking up"
Based on @Jano's answer, I made an improvement by using Int as the literal type:
import UIKit

public final class Colors: UIColor {
}

extension Colors: ExpressibleByIntegerLiteral {
    public typealias IntegerLiteralType = Int

    // Interprets the literal as 0xRRGGBBAA.
    public convenience init(integerLiteral value: Int) {
        let red   = CGFloat((value >> 24) & 0xFF) / 0xFF
        let green = CGFloat((value >> 16) & 0xFF) / 0xFF
        let blue  = CGFloat((value >> 8) & 0xFF) / 0xFF
        let alpha = CGFloat(value & 0xFF) / 0xFF
        self.init(red: red, green: green, blue: blue, alpha: alpha)
    }
}

extension Colors: RawRepresentable {
    public typealias RawValue = Int

    public var rawValue: RawValue {
        return hex
    }

    public convenience init?(rawValue: RawValue) {
        self.init(integerLiteral: rawValue)
    }
}

fileprivate extension UIColor {
    // Packs the components back into 0xRRGGBBAA so rawValue round-trips
    // with the integer-literal initializer above.
    var hex: Int {
        var fRed: CGFloat = 0
        var fGreen: CGFloat = 0
        var fBlue: CGFloat = 0
        var fAlpha: CGFloat = 0
        if self.getRed(&fRed, green: &fGreen, blue: &fBlue, alpha: &fAlpha) {
            let red = Int(fRed * 255.0)
            let green = Int(fGreen * 255.0)
            let blue = Int(fBlue * 255.0)
            let alpha = Int(fAlpha * 255.0)
            return (red << 24) + (green << 16) + (blue << 8) + alpha
        } else {
            return 0x000000
        }
    }
}
public enum MainPalette: Colors {
    case red = 0xFF0000ff
    case white = 0xFFFFFFFF
}

public enum FeatureXPalette: Colors {
    case blue = 0x024F9Eff
    // case bluish = 0x024F9Eff // <- Can't do
    case red = 0xFF0000ff
}
The advantage is that it doesn't allow duplicate colors (like a true enum), and it also supports alpha.
As you can see, you can create multiple enums for different palettes/schemes. If you want views to be able to use any palette, you can just add a protocol:
protocol Color {
    var color: UIColor { get }
}

extension MainPalette: Color {
    var color: UIColor {
        return rawValue
    }
}

extension FeatureXPalette: Color {
    var color: UIColor {
        return rawValue
    }
}
so that way you can have a function that takes in the protocol:
func printColorEquality(color1: Color, color2: Color) {
    print(color1.color == color2.color)
}

let red1: Color = MainPalette.red
let red2: Color = FeatureXPalette.red
printColorEquality(color1: red1, color2: red2)
What I also like to do is add static vars for convenience:
extension MainPalette {
    public static var brightRed: UIColor {
        return MainPalette.red.color
    }
}
That gives you a cleaner API:
view.backgroundColor = MainPalette.brightRed
Naming can be improved: you have to choose whether you want a nice convenience API or nice naming for your enums.
This answer is probably late, but it may help others finding this question.
I was not satisfied with the answers above, since adding colors as a UIColor extension is not always what you want:
It might not be the best solution from a software-architecture perspective.
You cannot use the power that enums have, e.g. CaseIterable.
This is the solution I came up with:
enum PencilColor {
case lightRed
case darkPurple
var associatedColor: UIColor {
switch self {
case .lightRed: return UIColor(red: 67/255, green: 173/255, blue: 247/255, alpha: 1.0)
case .darkPurple: return UIColor(red: 67/255, green: 173/255, blue: 247/255, alpha: 1.0)
}
}
}
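A sketch of the CaseIterable benefit mentioned above; the conformance is added here as an assumption, since the original enum does not declare it:
extension PencilColor: CaseIterable {}

for pencil in PencilColor.allCases {
    print(pencil, pencil.associatedColor)
}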

Computing complementary, triadic, tetradic, and analogous colors

I have created Swift functions that take a color and return its triadic and tetradic values. They sort of work, but I am not happy with the color results. Can anyone help me fine-tune the formula, please?
I was following a few sources, but the returned colors were too bright or too saturated compared to several online web-based color schemes. I know it's a matter of preference as well, and I kind of like the results of the code below, but for some input colors one of the returned colors is so close to the original that the difference is barely visible. It applies only to a few colors...
I was using the formula from here:
My code:
func getTriadColor(color: UIColor) -> (UIColor, UIColor) {
    var hue: CGFloat = 0
    var saturation: CGFloat = 0
    var brightness: CGFloat = 0
    var alpha: CGFloat = 0
    color.getHue(&hue, saturation: &saturation, brightness: &brightness, alpha: &alpha)
    let triadColor1 = UIColor(hue: (hue + 0.33) - 1.0, saturation: saturation, brightness: brightness, alpha: alpha)
    let triadColor2 = UIColor(hue: (hue + 0.66) - 1.0, saturation: saturation, brightness: brightness, alpha: alpha)
    return (triadColor1, triadColor2)
}

func getTetradColor(color: UIColor) -> (UIColor, UIColor, UIColor) {
    var hue: CGFloat = 0
    var saturation: CGFloat = 0
    var brightness: CGFloat = 0
    var alpha: CGFloat = 0
    color.getHue(&hue, saturation: &saturation, brightness: &brightness, alpha: &alpha)
    let tetradColor1 = UIColor(hue: (hue + 0.25) - 1.0, saturation: saturation, brightness: brightness, alpha: alpha)
    let tetradColor2 = UIColor(hue: (hue + 0.5) - 1.0, saturation: saturation, brightness: brightness, alpha: alpha)
    let tetradColor3 = UIColor(hue: (hue + 0.75) - 1.0, saturation: saturation, brightness: brightness, alpha: alpha)
    return (tetradColor1, tetradColor2, tetradColor3)
}
And I also found nice, clean code for finding the complementary color, whose results I am very happy with:
func getComplementColor(color: UIColor) -> UIColor {
    let ciColor = CIColor(color: color)
    let compRed: CGFloat = 1.0 - ciColor.red
    let compGreen: CGFloat = 1.0 - ciColor.green
    let compBlue: CGFloat = 1.0 - ciColor.blue
    return UIColor(red: compRed, green: compGreen, blue: compBlue, alpha: 1.0)
}
Your screen shot is of this web page. (Wayback Machine link because, six years later, the page has been deleted.) The formulas on that page are incorrect, because they specify the use of the absolute value function instead of the modulo function. That is, for example, your screen shot defines
H1 = |(H0 + 180°) - 360°|
but consider what this gives for the input H0 = 90°:
H1 = |(90° + 180°) - 360°| = |270° - 360°| = |-90°| = 90°
Do you think that the complementary hue of H0 = 90° is H1 = 90°, the same hue?
The correct formula is
H1 = (H0 + 180°) mod 360°
where “mod” is short for “modulo” and means “the remainder after dividing by”. In other words, if the answer would be above 360°, subtract 360°. For H0 = 90°, this gives the correct answer of H1 = 270°.
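In Swift, with hue normalized to the 0…1 range that UIKit uses, the same formula is (a sketch):
func complementaryHue(_ h: CGFloat) -> CGFloat {
    return fmod(h + 0.5, 1.0) // (H0 + 180°) mod 360°, scaled to 0…1
}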
But you don't even have this problem in your code, because you didn't use the absolute value function (or the modulo function) in your code. Since you're not doing anything to keep your hue values in the range 0…1, your hue values that are less than zero are clipped to zero, and your hue values above one are clipped to one (and both zero and one mean red).
Your getComplementColor is also not at all the standard definition of the “complementary color”.
Here are the correct definitions:
extension UIColor {
    var complement: UIColor {
        return self.withHueOffset(0.5)
    }

    var splitComplement0: UIColor {
        return self.withHueOffset(150 / 360)
    }

    var splitComplement1: UIColor {
        return self.withHueOffset(210 / 360)
    }

    var triadic0: UIColor {
        return self.withHueOffset(120 / 360)
    }

    var triadic1: UIColor {
        return self.withHueOffset(240 / 360)
    }

    var tetradic0: UIColor {
        return self.withHueOffset(0.25)
    }

    var tetradic1: UIColor {
        return self.complement
    }

    var tetradic2: UIColor {
        return self.withHueOffset(0.75)
    }

    var analagous0: UIColor {
        return self.withHueOffset(-1 / 12)
    }

    var analagous1: UIColor {
        return self.withHueOffset(1 / 12)
    }

    func withHueOffset(offset: CGFloat) -> UIColor {
        var h: CGFloat = 0
        var s: CGFloat = 0
        var b: CGFloat = 0
        var a: CGFloat = 0
        self.getHue(&h, saturation: &s, brightness: &b, alpha: &a)
        return UIColor(hue: fmod(h + offset, 1), saturation: s, brightness: b, alpha: a)
    }
}
Here are some examples of complementary colors (original on top, complementary beneath):
Here are split complementary colors (original on top):
Here are triadic colors (original on top):
Here are tetradic colors (original on top):
Here are analogous colors (original in the middle):
Here is the playground I used to generate those images:
// Note: this is a Swift 2-era playground (XCPlayground, CGRectMake, .FlexibleWidth);
// the syntax predates the Swift 3 renaming.
import XCPlayground
import UIKit

let view = UIView(frame: CGRectMake(0, 0, 320, 480))
view.backgroundColor = [#Color(colorLiteralRed: 0.9607843137254902, green: 0.9607843137254902, blue: 0.9607843137254902, alpha: 1)#]

let vStack = UIStackView(frame: view.bounds)
vStack.autoresizingMask = [ .FlexibleWidth, .FlexibleHeight ]
view.addSubview(vStack)
vStack.axis = .Vertical
vStack.distribution = .FillEqually
vStack.alignment = .Fill
vStack.spacing = 10

typealias ColorTransform = (UIColor) -> UIColor

func tile(color color: UIColor) -> UIView {
    let view = UIView()
    view.translatesAutoresizingMaskIntoConstraints = false
    view.backgroundColor = color
    return view
}

func strip(transforms: [ColorTransform]) -> UIStackView {
    let strip = UIStackView()
    strip.translatesAutoresizingMaskIntoConstraints = false
    strip.axis = .Vertical
    strip.distribution = .FillEqually
    strip.alignment = .Fill
    strip.spacing = 0
    let hStacks = (0 ..< transforms.count).map { (i: Int) -> UIStackView in
        let stack = UIStackView()
        stack.translatesAutoresizingMaskIntoConstraints = false
        stack.axis = .Horizontal
        stack.distribution = .FillEqually
        stack.alignment = .Fill
        stack.spacing = 4
        strip.addArrangedSubview(stack)
        return stack
    }
    for h in 0 ..< 10 {
        let hue = CGFloat(h) / 10
        let color = UIColor(hue: hue, saturation: 1, brightness: 1, alpha: 1)
        for (i, transform) in transforms.enumerate() {
            hStacks[i].addArrangedSubview(tile(color: transform(color)))
        }
    }
    return strip
}

vStack.addArrangedSubview(strip([
    { $0 },
    { $0.complement }]))
vStack.addArrangedSubview(strip([
    { $0 },
    { $0.splitComplement0 },
    { $0.splitComplement1 }]))
vStack.addArrangedSubview(strip([
    { $0 },
    { $0.triadic0 },
    { $0.triadic1 }]))
vStack.addArrangedSubview(strip([
    { $0 },
    { $0.tetradic0 },
    { $0.tetradic1 },
    { $0.tetradic2 }]))
vStack.addArrangedSubview(strip([
    { $0.analagous0 },
    { $0 },
    { $0.analagous1 }]))

XCPlaygroundPage.currentPage.liveView = view