Convert UIColor initialization to Color Literal Xcode - swift

I have multiple different UIColor objects. Some of them are initialized with an initializer, while others are shown as color literals.
static let optionsHeader = UIColor(<color literal>) // Xcode renders the literal here as a color swatch.
static let optionButtonSelected = UIColor(red: 0.865, green: 0.804, blue: 0.0, alpha: 1.0)
How can I convert the UIColor.init(...) statements to a color literal?

An RGB color literal takes the same red/green/blue/alpha parameters as the UIColor initializer:
#colorLiteral(red: 1, green: 1, blue: 1, alpha: 1)
Alternatively, type #colorLiteral() and Xcode will let you pick the color from a swatch.
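For example (my example, not in the original answer), the asker's optionButtonSelected could be rewritten as a literal with the same component values; Xcode then renders the expression as a color swatch:
static let optionButtonSelected: UIColor = #colorLiteral(red: 0.865, green: 0.804, blue: 0.0, alpha: 1.0)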

You can also add an extension so you can pass a hex string instead of individual RGBA values:
import UIKit

extension UIColor {
    // Accepts "#RRGGBB" or "RRGGBB".
    convenience init(hexString: String, alpha: CGFloat = 1.0) {
        var hexString = hexString.trimmingCharacters(in: .whitespacesAndNewlines)
        if hexString.hasPrefix("#") {
            hexString.removeFirst()
        }
        var color: UInt64 = 0
        _ = Scanner(string: hexString).scanHexInt64(&color)   // scanHexInt32/scanLocation are deprecated
        let mask: UInt64 = 0x0000_00FF
        let red   = CGFloat(Int((color >> 16) & mask)) / 255.0
        let green = CGFloat(Int((color >> 8) & mask)) / 255.0
        let blue  = CGFloat(Int(color & mask)) / 255.0
        self.init(red: red, green: green, blue: blue, alpha: alpha)
    }

    func toHexString() -> String {
        var r: CGFloat = 0, g: CGFloat = 0, b: CGFloat = 0, a: CGFloat = 0
        getRed(&r, green: &g, blue: &b, alpha: &a)
        let rgb: Int = Int(r * 255) << 16 | Int(g * 255) << 8 | Int(b * 255)
        return String(format: "#%06x", rgb)
    }
}
Then you can use it like this:
self.backgroundColor = UIColor(hexString: "#4A4A4A")
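A quick round trip as a sanity check (my example, not from the original answer):
let grey = UIColor(hexString: "#4A4A4A")
print(grey.toHexString())   // "#4a4a4a"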

Related

How to save a Color in @AppStorage in SwiftUI

I want to save a Color value in @AppStorage but I get an error. How can I fix it?
@AppStorage("selectedColor") var selectedColor: Color = .teal
Save the color as a String instead:
@AppStorage("selectedColor") var selectedColor: String = ""
Create a function that transforms the Color into a String:
func updateCardColorInAppStorage(color: Color) -> String {
    let uiColor = UIColor(color)
    var red: CGFloat = 0
    var green: CGFloat = 0
    var blue: CGFloat = 0
    var alpha: CGFloat = 0
    uiColor.getRed(&red, green: &green, blue: &blue, alpha: &alpha)
    return "\(red),\(green),\(blue),\(alpha)"
}
And convert the String back to a Color:
if cardColor != "" {
    let rgbArray = cardColor.components(separatedBy: ",")
    if let red = Double(rgbArray[0]), let green = Double(rgbArray[1]),
       let blue = Double(rgbArray[2]), let alpha = Double(rgbArray[3]) {
        bgColor = Color(.sRGB, red: red, green: green, blue: blue, opacity: alpha)
    }
}
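Not from the original answer: a minimal sketch of how the two helpers could be wired into a view, assuming a hypothetical CardView (the encoder is repeated so the sketch is self-contained):
import SwiftUI
import UIKit

struct CardView: View {
    @AppStorage("selectedColor") private var cardColor: String = ""
    @State private var bgColor: Color = .teal

    var body: some View {
        ColorPicker("Card color", selection: $bgColor)
            .padding()
            .background(bgColor)
            .onChange(of: bgColor) { newColor in
                // Encode the picked color as "r,g,b,a" and persist it.
                cardColor = updateCardColorInAppStorage(color: newColor)
            }
            .onAppear {
                // Restore the persisted color, if any, using the decoding shown above.
                let parts = cardColor.components(separatedBy: ",").compactMap { Double($0) }
                if parts.count == 4 {
                    bgColor = Color(.sRGB, red: parts[0], green: parts[1],
                                    blue: parts[2], opacity: parts[3])
                }
            }
    }

    // Same encoder as in the answer above.
    private func updateCardColorInAppStorage(color: Color) -> String {
        let uiColor = UIColor(color)
        var red: CGFloat = 0, green: CGFloat = 0, blue: CGFloat = 0, alpha: CGFloat = 0
        uiColor.getRed(&red, green: &green, blue: &blue, alpha: &alpha)
        return "\(red),\(green),\(blue),\(alpha)"
    }
}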

How to convert a Color Literal to a CGColor in Swift?

var codeBackground = #colorLiteral(red: 0.1294117647, green: 0.1294117647, blue: 0.1960784314, alpha: 1)
cells?.layer.borderColor = // ... how can I set this color literal as a CGColor?
I know how to convert a UIColor to a CGColor in Swift, for example:
UIColor.black.cgColor
Bingo, but what about a color literal to CGColor?
Thank you.
Since you already know the simpler way of using a color literal as a CGColor, I'll jump to the other way of doing it.
For that you need a custom function that extracts the color components (red, green, blue) from the color literal, as below:
extension UIColor {
    // A color literal is just a UIColor, so it is convenient to declare this function in a UIColor extension.
    func rgb() -> (red: Int, green: Int, blue: Int, alpha: Int)? {
        var fRed: CGFloat = 0
        var fGreen: CGFloat = 0
        var fBlue: CGFloat = 0
        var fAlpha: CGFloat = 0
        if getRed(&fRed, green: &fGreen, blue: &fBlue, alpha: &fAlpha) {
            // Scale all four components to the 0...255 range.
            let iRed = Int(fRed * 255.0)
            let iGreen = Int(fGreen * 255.0)
            let iBlue = Int(fBlue * 255.0)
            let iAlpha = Int(fAlpha * 255.0)
            return (red: iRed, green: iGreen, blue: iBlue, alpha: iAlpha)
        } else {
            // Could not extract RGBA components (e.g. an incompatible color space).
            return nil
        }
    }
}
Now, after creating this function, you can convert the color literal to a CGColor like this:
let codeBackground = #colorLiteral(red: 0.1294117647, green: 0.1294117647, blue: 0.1960784314, alpha: 1)
let rgb = codeBackground.rgb()!
// CGColor expects components in the 0...1 range, so scale the 0...255 values back down.
let convertedCGColor = CGColor(srgbRed: CGFloat(rgb.red) / 255.0,
                               green: CGFloat(rgb.green) / 255.0,
                               blue: CGFloat(rgb.blue) / 255.0,
                               alpha: CGFloat(rgb.alpha) / 255.0)
You can then use convertedCGColor directly:
cells?.layer.borderColor = convertedCGColor
Hope it helps.
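For completeness (not spelled out above): because #colorLiteral produces a plain UIColor, the simpler route mentioned at the start is just its cgColor property:
let codeBackground = #colorLiteral(red: 0.1294117647, green: 0.1294117647, blue: 0.1960784314, alpha: 1)
cells?.layer.borderColor = codeBackground.cgColor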

How to get RGB components from Color in SwiftUI

If I have a SwiftUI Color:
let col: Color = Color(red: 0.5, green: 0.5, blue: 0.5)
How do I get the RGB components from col?
Like this maybe:
print(col.components.red)
In UIKit, I could use UIColor.getRed but there doesn't seem to be an equivalent in SwiftUI.
iOS 14 / macOS 11
There are now initializers that create a UIColor (on iOS) or an NSColor (on macOS) from a SwiftUI Color. With their help you can implement the following extension:
iOS / macOS
import SwiftUI
#if canImport(UIKit)
import UIKit
#elseif canImport(AppKit)
import AppKit
#endif

extension Color {
    var components: (red: CGFloat, green: CGFloat, blue: CGFloat, opacity: CGFloat) {
        #if canImport(UIKit)
        typealias NativeColor = UIColor
        #elseif canImport(AppKit)
        typealias NativeColor = NSColor
        #endif

        var r: CGFloat = 0
        var g: CGFloat = 0
        var b: CGFloat = 0
        var o: CGFloat = 0

        guard NativeColor(self).getRed(&r, green: &g, blue: &b, alpha: &o) else {
            // You can handle the failure here as you want
            return (0, 0, 0, 0)
        }

        return (r, g, b, o)
    }
}
Usage
Color.red.components.red // 0.9999999403953552 // <- SwiftUI Colors are not pure!
While waiting for a proper API, I've abused the CustomStringConvertible protocol for the simple RGBA case, where the color's description has the format #rrggbbaa:
debugPrint(Color.red)
debugPrint(Color(red: 1.0, green: 0.0, blue: 0.0))
debugPrint(Color(red: 1.0, green: 0.3, blue: 0.0))
debugPrint(Color(.sRGB, red: 1.0, green: 0.0, blue: 0.5, opacity: 0.3))
debugPrint(Color(hue: 1.0, saturation: 0.0, brightness: 1.0))
debugPrint(Color(.displayP3, red: 1.0, green: 0.0, blue: 0.0, opacity: 1.0).description)
which prints:
red
#FF0000FF
#FF4C00FF
#FF00804D
#FFFFFFFF
"DisplayP3(red: 1.0, green: 0.0, blue: 0.0, opacity: 1.0)"
As you can see, system colors like Color.red just dump "red", but if you are working with simple RGB colors generated in code (e.g. from a color picker), this is not too bad:
extension SwiftUI.Color {
    var redComponent: Double? {
        let val = description
        guard val.hasPrefix("#") else { return nil }
        let r1 = val.index(val.startIndex, offsetBy: 1)
        let r2 = val.index(val.startIndex, offsetBy: 2)
        return Double(Int(val[r1...r2], radix: 16)!) / 255.0
    }

    var greenComponent: Double? {
        let val = description
        guard val.hasPrefix("#") else { return nil }
        let g1 = val.index(val.startIndex, offsetBy: 3)
        let g2 = val.index(val.startIndex, offsetBy: 4)
        return Double(Int(val[g1...g2], radix: 16)!) / 255.0
    }

    var blueComponent: Double? {
        let val = description
        guard val.hasPrefix("#") else { return nil }
        let b1 = val.index(val.startIndex, offsetBy: 5)
        let b2 = val.index(val.startIndex, offsetBy: 6)
        return Double(Int(val[b1...b2], radix: 16)!) / 255.0
    }

    var opacityComponent: Double? {
        let val = description
        guard val.hasPrefix("#") else { return nil }
        let o1 = val.index(val.startIndex, offsetBy: 7)
        let o2 = val.index(val.startIndex, offsetBy: 8)
        return Double(Int(val[o1...o2], radix: 16)!) / 255.0
    }
}
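A quick illustration of the trade-off (my example; the values match the dumps above):
Color(red: 1.0, green: 0.3, blue: 0.0).redComponent   // 1.0 (description is "#FF4C00FF")
Color.red.redComponent                                // nil (description is just "red")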
Simple one-liner:
print(UIColor(Color.blue).cgColor.components)
You get a [CGFloat]? containing [red, green, blue, alpha].
I have found that @Mojtaba Hosseini's answer works fine, except when your colors are declared in the asset catalog with light and dark appearances.
In that case the dark appearance somehow gets lost when using UIColor(self). Here is a workaround I came up with.
Note: this is iOS-only since my app is iOS-only; you could of course adapt it to macOS as @Mojtaba Hosseini did.
extension Color {
    var components: (r: Double, g: Double, b: Double, o: Double)? {
        let uiColor: UIColor
        var r: CGFloat = 0
        var g: CGFloat = 0
        var b: CGFloat = 0
        var o: CGFloat = 0

        if self.description.contains("NamedColor") {
            let lowerBound = self.description.range(of: "name: \"")!.upperBound
            let upperBound = self.description.range(of: "\", bundle")!.lowerBound
            let assetsName = String(self.description[lowerBound..<upperBound])
            uiColor = UIColor(named: assetsName)!
        } else {
            uiColor = UIColor(self)
        }

        guard uiColor.getRed(&r, green: &g, blue: &b, alpha: &o) else { return nil }
        return (Double(r), Double(g), Double(b), Double(o))
    }
}
The idea is to use the UIColor(named:) initializer instead, where all appearances are resolved correctly.
Fortunately, the name we set in the asset catalog is contained in the Color's description; we only have to extract it, since the description also contains other information (bundle, etc.).
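For example (my example, with a hypothetical asset catalog color named "CardBackground"):
let components = Color("CardBackground").components   // goes through UIColor(named:), so both appearances stay intact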
Based on @Mojtaba's answer, I came up with a shorter, more flexible version:
import SwiftUI
#if canImport(UIKit)
import UIKit
#elseif canImport(AppKit)
import AppKit
#endif

extension Color {
    #if canImport(UIKit)
    var asNative: UIColor { UIColor(self) }
    #elseif canImport(AppKit)
    var asNative: NSColor { NSColor(self) }
    #endif

    var rgba: (red: CGFloat, green: CGFloat, blue: CGFloat, alpha: CGFloat) {
        #if canImport(AppKit)
        // NSColor must be converted to an RGB color space before its components can be read.
        let color = asNative.usingColorSpace(.deviceRGB)!
        #else
        let color = asNative   // UIColor has no usingColorSpace(_:); getRed works directly.
        #endif
        var t = (CGFloat(), CGFloat(), CGFloat(), CGFloat())
        color.getRed(&t.0, green: &t.1, blue: &t.2, alpha: &t.3)
        return t
    }

    var hsva: (hue: CGFloat, saturation: CGFloat, value: CGFloat, alpha: CGFloat) {
        #if canImport(AppKit)
        let color = asNative.usingColorSpace(.deviceRGB)!
        #else
        let color = asNative
        #endif
        var t = (CGFloat(), CGFloat(), CGFloat(), CGFloat())
        color.getHue(&t.0, saturation: &t.1, brightness: &t.2, alpha: &t.3)
        return t
    }
}
On macOS, doing asNative.redComponent etc. might also work, FYI.
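A usage sketch (mine, not part of the answer; values are approximate because SwiftUI colors are not pure):
let tint = Color(red: 0.2, green: 0.4, blue: 0.6)
let (r, g, b, _) = tint.rgba   // ≈ (0.2, 0.4, 0.6)
let (h, s, v, _) = tint.hsva   // the same color as hue/saturation/value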
The answer is no - there's no API to do so (yet), but...
Most SwiftUI structs have private fields, and Color is no exception.
You can use Mirror to extract such information - but keep in mind it is not efficient.
Here's how to extract the hexadecimal representation of a SwiftUI Color - for educational purposes.
Copy and paste this into an Xcode 11 playground.
import UIKit
import SwiftUI

let systemColor = Color.red
let color = Color(red: 0.3, green: 0.5, blue: 1)

extension Color {
    var hexRepresentation: String? {
        // Reflect over the Color itself (not a captured global).
        let children = Mirror(reflecting: self).children
        let _provider = children.filter { $0.label == "provider" }.first
        guard let provider = _provider?.value else {
            return nil
        }
        let providerChildren = Mirror(reflecting: provider).children
        let _base = providerChildren.filter { $0.label == "base" }.first
        guard let base = _base?.value else {
            return nil
        }
        var baseValue: String = ""
        dump(base, to: &baseValue)
        guard let firstLine = baseValue.split(separator: "\n").first,
              let hexString = firstLine.split(separator: " ")[1] as Substring? else {
            return nil
        }
        return hexString.trimmingCharacters(in: .newlines)
    }
}
systemColor.hexRepresentation
color.hexRepresentation
Colors like .red, .white, etc., don't seem to carry much information when dumped -
just their "system" name:
▿ red
▿ provider: SwiftUI.(unknown context at $1297483bc).ColorBox<SwiftUI.SystemColorType> #0
- super: SwiftUI.(unknown context at $129748300).AnyColorBox
- base: SwiftUI.SystemColorType.red
A Color instantiated with red/blue/green components does instead.
▿ #4C80FFFF
▿ provider: SwiftUI.(unknown context at $11cd2e3bc).ColorBox<SwiftUI.Color._Resolved> #0
- super: SwiftUI.(unknown context at $11cd2e300).AnyColorBox
▿ base: #4C80FFFF
- linearRed: 0.073238954
- linearGreen: 0.21404114
- linearBlue: 1.0
- opacity: 1.0
In the Playground, you will see:
systemColor.hexRepresentation returning nil
color.hexRepresentation returning "#4C80FFFF"
You can also go through UIColor and transform the UIColor back to a Color afterwards.
Code:
extension UIColor {
    func hexValue() -> String {
        let values = self.cgColor.components
        var outputR: Int = 0
        var outputG: Int = 0
        var outputB: Int = 0
        var outputA: Int = 255   // fully opaque by default, so "%02X" yields "FF"
        switch values!.count {
        case 1:     // grayscale only
            outputR = Int(values![0] * 255)
            outputG = Int(values![0] * 255)
            outputB = Int(values![0] * 255)
        case 2:     // grayscale + alpha
            outputR = Int(values![0] * 255)
            outputG = Int(values![0] * 255)
            outputB = Int(values![0] * 255)
            outputA = Int(values![1] * 255)
        case 3:     // red, green, blue
            outputR = Int(values![0] * 255)
            outputG = Int(values![1] * 255)
            outputB = Int(values![2] * 255)
        case 4:     // red, green, blue, alpha
            outputR = Int(values![0] * 255)
            outputG = Int(values![1] * 255)
            outputB = Int(values![2] * 255)
            outputA = Int(values![3] * 255)
        default:
            break
        }
        return "#" + String(format: "%02X", outputR) +
                     String(format: "%02X", outputG) +
                     String(format: "%02X", outputB) +
                     String(format: "%02X", outputA)
    }
}
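For example (my sketch, assuming an sRGB color):
UIColor(red: 1.0, green: 0.0, blue: 0.0, alpha: 1.0).hexValue()               // "#FF0000FF"
let swiftUIRed = Color(UIColor(red: 1.0, green: 0.0, blue: 0.0, alpha: 1.0))   // and back to a SwiftUI Color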

Is the method of using the | bitwise operator to mix two colours correct?

I am building an app that allows users to select two colors and see the result of mixing them. For example, the user selects red (#ff0000) and blue (#0000ff) and the result is purple (#ff00ff).
I started by writing three methods in a UIColor extension:
hexColor converts an Int to a color
intValue returns the color's integer representation. i.e. the reverse of hexColor
hexDescription returns the string representation of the color, such as "#ff00ff"
Here are the implementations, just in case this is needed:
public static func hexColor(hex: Int32) -> UIColor {
    return UIColor(red: CGFloat((hex >> 16) & 0xFF) / 255.0,
                   green: CGFloat((hex >> 8) & 0xFF) / 255.0,
                   blue: CGFloat(hex & 0xFF) / 255.0,
                   alpha: 1.0)
}

public func intValue() -> Int {
    var hexString = self.hexDescription()
    hexString = hexString.substringFromIndex(hexString.startIndex.successor())  // drop the leading "#"
    return Int(hexString, radix: 16)!
}

public func hexDescription() -> String {
    var rF: CGFloat = 0,
        gF: CGFloat = 0,
        bF: CGFloat = 0,
        aF: CGFloat = 0
    self.getRed(&rF, green: &gF, blue: &bF, alpha: &aF)
    let r = Int(rF * 255.0)
    let g = Int(gF * 255.0)
    let b = Int(bF * 255.0)
    return "#" + String(format: "%02x%02x%02x", r, g, b)
}
Then I thought about how I could actually mix the colors. My first try was to take the average of the HSV values:
public func mixWith(color: UIColor) -> UIColor {
    var myHue: CGFloat = 0
    var mySat: CGFloat = 0
    var myVal: CGFloat = 0
    var otherHue: CGFloat = 0
    var otherSat: CGFloat = 0
    var otherVal: CGFloat = 0
    self.getHue(&myHue, saturation: &mySat, brightness: &myVal, alpha: nil)
    color.getHue(&otherHue, saturation: &otherSat, brightness: &otherVal, alpha: nil)
    let averageHue = (myHue + otherHue) / 2.0
    let averageSat = (mySat + otherSat) / 2.0
    let averageVal = (myVal + otherVal) / 2.0
    return UIColor(hue: averageHue, saturation: averageSat, brightness: averageVal, alpha: 1.0)
}
But this failed. When I mix blue and yellow, I get #00ff7f but it should be white.
Then I tried taking the average of the int values:
public func mixWith2(color: UIColor) -> UIColor {
    let average = (self.intValue() + color.intValue()) / 2
    return UIColor.hexColor(Int32(average))
}
But again, blue mixed with yellow is not white using the above method.
In the end, I decided to try bitwise operators. I tested |, & and ^. Surprisingly, this returns white!
UIColor.hexColor(Int32(UIColor.blueColor().intValue() |
                       UIColor.yellowColor().intValue()))
and so does this:
UIColor.hexColor(Int32(UIColor.blueColor().intValue() ^
                       UIColor.yellowColor().intValue()))
I did some other tests and this method passed all of them!
Is this a correct (gives correct result all the time) method of mixing two colors? If yes, how does it work?
The short answer is no, as noted in the comments. It only happens to work here because blue is 0x0000ff and yellow is 0xffff00: their bits don't overlap, so OR-ing (or XOR-ing) them yields 0xffffff. For most color pairs the channels do overlap and the bitwise result has no perceptual meaning. Perhaps the easiest way of blending two colors is a weighted average:
extension UIColor {
    func blend(rhs: UIColor, midpoint left: CGFloat = 0.50) -> UIColor {
        let right = 1.0 - left
        var lr: CGFloat = 0
        var lg: CGFloat = 0
        var lb: CGFloat = 0
        var la: CGFloat = 0
        getRed(&lr, green: &lg, blue: &lb, alpha: &la)
        var rr: CGFloat = 0
        var rg: CGFloat = 0
        var rb: CGFloat = 0
        var ra: CGFloat = 0
        rhs.getRed(&rr, green: &rg, blue: &rb, alpha: &ra)
        return UIColor(
            red: lr * left + rr * right,
            green: lg * left + rg * right,
            blue: lb * left + rb * right,
            alpha: la * left + ra * right
        )
    }
}
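A small usage sketch (my example): with the default midpoint of 0.5, an even blend of blue and yellow comes out as a mid grey rather than white.
let mix = UIColor.blue.blend(rhs: .yellow)                         // red 0.5, green 0.5, blue 0.5
let mostlyBlue = UIColor.blue.blend(rhs: .yellow, midpoint: 0.8)   // weighted 80% towards blue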

UIColor code in a variable in swift

I want to set the background color of a view through a parameter so that I can change it whenever I want. The value of the parameter comes from a database. My current code is as follows:
loadingView.backgroundColor = UIColor(red:0.99, green:0.75, blue:0.14, alpha:1.0)
I want to change the above code as follows:
loadingView.backgroundColor = UIColor(red:a, green:b, blue:c, alpha:d)
or
loadingView.backgroundColor = UIColor(hex)
Where a, b, c, d and hex are variables and all of these have a value.
If hex is an Int and not a String you can use this extension:
extension UIColor {
    convenience init(hex: Int, alpha: CGFloat) {
        let red = CGFloat((hex & 0xFF0000) >> 16) / 255.0
        let green = CGFloat((hex & 0xFF00) >> 8) / 255.0
        let blue = CGFloat(hex & 0xFF) / 255.0
        self.init(red: red, green: green, blue: blue, alpha: alpha)
    }
}
Used like this (note the hex: argument label):
UIColor(hex: 0x00ff00, alpha: 1.0)
UIColor(hex: hex, alpha: 1.0)
extension String {
    // Convenience subscript so we can write htmlColor[0...1] etc. below (updated for current Swift).
    subscript(range: ClosedRange<Int>) -> String {
        let start = index(startIndex, offsetBy: range.lowerBound)
        let end = index(startIndex, offsetBy: range.upperBound)
        return String(self[start...end])
    }
}

extension UIColor {
    convenience init(htmlColor: String, alpha: Double) {
        // Expects a 6-digit hex string such as "00ff00" (no leading "#").
        self.init(red: CGFloat(strtoul(htmlColor[0...1], nil, 16)) / 255.0,
                  green: CGFloat(strtoul(htmlColor[2...3], nil, 16)) / 255.0,
                  blue: CGFloat(strtoul(htmlColor[4...5], nil, 16)) / 255.0,
                  alpha: CGFloat(alpha))
    }
}
Testing
let greenColorHEX = "00ff00"
let greenColor = UIColor(htmlColor: greenColorHEX, alpha: 1)
let a: CGFloat = 1
let b: CGFloat = 1
let c: CGFloat = 0
let d: CGFloat = 1
let yellowColor = UIColor(red: a, green: b, blue: c, alpha: d)
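Not part of the original answers: a small sketch putting both extensions to work, assuming the database hands back either an Int or a hex String (loadingView is the view from the question):
// Hypothetical values coming from the database.
let storedHexInt = 0xFCBF24
let storedHexString = "fcbf24"

loadingView.backgroundColor = UIColor(hex: storedHexInt, alpha: 1.0)            // Int-based extension
loadingView.backgroundColor = UIColor(htmlColor: storedHexString, alpha: 1.0)   // String-based extension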