I have an array of colours that looks like this...
var purpleShades: [(CGFloat, CGFloat, CGFloat)] = [(186.0/255.0, 85.0/255.0, 211.0/255.0), (147.0/255.0, 112.0/255.0, 219.0/255.0), (138.0/255.0, 43.0/255.0, 226.0/255.0), (148.0/255.0, 0.0/255.0, 211.0/255.0), (153.0/255.0, 50.0/255.0, 204.0/255.0), (139.0/255.0, 0.0/255.0, 139.0/255.0)]
Rather than duplicate code, I was wondering if anyone could help me convert it to UIColor so I can use it in this piece of code.
cell.tintColor = purpleShades[Int(index)]
This variation of init might help you: UIColor(red:green:blue:alpha:) accepts red, green, blue and alpha components directly.
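For the array above, a minimal sketch (assuming the tuples are already 0–1 RGB components and full opacity is wanted) maps each entry through that initializer:
// Map each (r, g, b) tuple to a UIColor with alpha 1
let purpleColors: [UIColor] = purpleShades.map {
    UIColor(red: $0.0, green: $0.1, blue: $0.2, alpha: 1.0)
}
cell.tintColor = purpleColors[Int(index)]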
Here's a nice extension to UIColor:
extension UIColor {
    convenience init(hex: UInt, alpha: CGFloat) {
        // Mask out each colour component and shift it down to the low byte
        let red = (hex & 0xFF0000) >> 16
        let green = (hex & 0x00FF00) >> 8
        let blue = hex & 0x0000FF
        self.init(red: CGFloat(red) / 255, green: CGFloat(green) / 255, blue: CGFloat(blue) / 255, alpha: alpha)
    }
}
With that you can write:
let purple = UIColor(hex: 0x9932CC, alpha: 1)
If you have a lot of colours, another extension on UIColor gives you…
extension UIColor {
    static let darkOrchid = UIColor(hex: 0x9932CC, alpha: 1)
    static let darkMagenta = UIColor(hex: 0x8B008B, alpha: 1)
    static let indigo = UIColor(hex: 0x4B0082, alpha: 1)
}
which allows you to say, for example…
cell.tintColor = .darkOrchid
Is there a compiled list of hex codes or RGB values for the SwiftUI colors? I'd like to know either of these for Color.purple, but I can't seem to find any good sources. Is there a way to determine the hex code or RGB value programmatically? Or maybe I can look at some properties of Color? Thanks in advance!
If you are coding in SwiftUI 2 you can convert your Color to UIColor and use the getRed method to get the red, green, blue and alpha components. Once you have the components, you can convert the values to a hex string:
import SwiftUI

extension Color {
    var uiColor: UIColor { .init(self) }

    typealias RGBA = (red: CGFloat, green: CGFloat, blue: CGFloat, alpha: CGFloat)

    var rgba: RGBA? {
        var (r, g, b, a): RGBA = (0, 0, 0, 0)
        return uiColor.getRed(&r, green: &g, blue: &b, alpha: &a) ? (r, g, b, a) : nil
    }

    var hexaRGB: String? {
        guard let (red, green, blue, _) = rgba else { return nil }
        return String(format: "#%02x%02x%02x",
                      Int(red * 255),
                      Int(green * 255),
                      Int(blue * 255))
    }

    var hexaRGBA: String? {
        guard let (red, green, blue, alpha) = rgba else { return nil }
        return String(format: "#%02x%02x%02x%02x",
                      Int(red * 255),
                      Int(green * 255),
                      Int(blue * 255),
                      Int(alpha * 255))
    }
}
Color.purple.hexaRGB   // "#af52de"
Color.purple.hexaRGBA  // "#af52deff"

if let (red, green, blue, alpha) = Color.purple.rgba {
    red    // 0.686274528503418
    green  // 0.321568638086319
    blue   // 0.8705882430076599
    alpha  // 1
}
Wouldn't it be better to use it like this?
import UIKit

extension UIColor {
    convenience init(hex: Int, alpha: CGFloat = 1.0) {
        self.init(
            red: CGFloat((hex & 0xFF0000) >> 16) / 255.0,
            green: CGFloat((hex & 0x00FF00) >> 8) / 255.0,
            blue: CGFloat((hex & 0x0000FF) >> 0) / 255.0,
            alpha: alpha
        )
    }
}
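With the default alpha, usage then shrinks to, for example:
let purple = UIColor(hex: 0x9932CC) // alpha defaults to 1.0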
var codeBackground = #colorLiteral(red: 0.1294117647, green: 0.1294117647, blue: 0.1960784314, alpha: 1)
cells?.layer.borderColor = // ... how can I set this color literal as a CGColor?
I know how to convert a UIColor to a CGColor in Swift, for example:
UIColor.black.cgColor
But what about converting a color literal to a CGColor?
Thank you.
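For reference, the simpler way the answer below alludes to: a #colorLiteral expression is typed as a UIColor here, so you can ask it for its cgColor directly:
cells?.layer.borderColor = codeBackground.cgColor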
Since you already know the simpler way of using a color literal as a CGColor, I'll jump to the other way of doing it...
For that you need a custom function which extracts the colour values (red, green, blue) from the color literal, as below:
extension UIColor {
    func rgb() -> (red: Int, green: Int, blue: Int, alpha: Int)? {
        var fRed: CGFloat = 0
        var fGreen: CGFloat = 0
        var fBlue: CGFloat = 0
        var fAlpha: CGFloat = 0
        if self.getRed(&fRed, green: &fGreen, blue: &fBlue, alpha: &fAlpha) {
            // Scale each 0...1 component up to the 0...255 integer range
            return (red: Int(fRed * 255.0),
                    green: Int(fGreen * 255.0),
                    blue: Int(fBlue * 255.0),
                    alpha: Int(fAlpha * 255.0))
        } else {
            // Could not extract RGBA components
            return nil
        }
    }
}
It's more convenient to keep this function in a UIColor extension.
Now, after creating this function, you can convert a color literal into a CGColor as below...
let codeBackground = #colorLiteral(red: 0.1294117647, green: 0.1294117647, blue: 0.1960784314, alpha: 1)
let rgb = codeBackground.rgb()!
// CGColor expects components in the 0...1 range, so scale the 0...255 values back down
let convertedCGColor = CGColor(srgbRed: CGFloat(rgb.red) / 255.0,
                               green: CGFloat(rgb.green) / 255.0,
                               blue: CGFloat(rgb.blue) / 255.0,
                               alpha: CGFloat(rgb.alpha) / 255.0)
You can then use convertedCGColor directly:
cells?.layer.borderColor = convertedCGColor
Hope it helps!
I have multiple different UIColor objects. Some of them are initialized by a constructor and some of them are shown as color literals.
static let optionsHeader = #colorLiteral(…) // Xcode renders this as a colour swatch
static let optionButtonSelected = UIColor(red: 0.865, green: 0.804, blue: 0.0, alpha: 1.0)
How can I convert the UIColor.init(...) statements to a color literal?
An RGB color literal takes the same arguments as the UIColor initializer:
#colorLiteral(red: 1, green: 1, blue: 1, alpha: 1)
Or you can pick the color visually after typing #colorLiteral().
You can also use an extension to work with hex colors instead of typing RGBA values:
extension UIColor {
    convenience init(hexString: String, alpha: CGFloat = 1.0) {
        var hexString = hexString.trimmingCharacters(in: .whitespacesAndNewlines)
        if hexString.hasPrefix("#") {
            hexString.removeFirst()
        }
        // scanHexInt32/scanLocation are deprecated; scan into a UInt64 instead
        var color: UInt64 = 0
        Scanner(string: hexString).scanHexInt64(&color)
        let mask: UInt64 = 0x000000FF
        let red = CGFloat((color >> 16) & mask) / 255.0
        let green = CGFloat((color >> 8) & mask) / 255.0
        let blue = CGFloat(color & mask) / 255.0
        self.init(red: red, green: green, blue: blue, alpha: alpha)
    }

    func toHexString() -> String {
        var r: CGFloat = 0
        var g: CGFloat = 0
        var b: CGFloat = 0
        var a: CGFloat = 0
        getRed(&r, green: &g, blue: &b, alpha: &a)
        let rgb: Int = Int(r * 255) << 16 | Int(g * 255) << 8 | Int(b * 255) << 0
        return String(format: "#%06x", rgb)
    }
}
Then you can use it like this:
self.backgroundColor = UIColor(hexString: "#4A4A4A")
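And going the other way, a quick sketch using the toHexString() method defined above:
UIColor.red.toHexString() // "#ff0000"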
I've got this UIColor:
UIColor(red: 0.2, green: 0.4118, blue: 0.1176, alpha: 1.0)
and I need to convert it to a UInt. How can I do that?
EDIT:
func showEmailMessage(advice: String) {
    _ = SCLAlertView().showSuccess("Congratulation", subTitle: advice, closeButtonTitle: "Ok", duration: 10, colorStyle: 0x33691e, colorTextButton: 0xFFFFFF)
}
The colorStyle parameter wants a UInt.
You can make use of the UIColor.getRed(...) method to extract the colors as CGFloat values, and then shift the values of your CGFloat triplet into the proper bit positions of a UInt32 variable.
// Example: use color triplet CC6699 "=" {204, 102, 153} (RGB triplet)
let color = UIColor(red: 204.0/255.0, green: 102.0/255.0, blue: 153.0/255.0, alpha: 1.0)

// Read colors into CGFloats, then shift each into its bit position in a UInt32
var red: CGFloat = 0, green: CGFloat = 0, blue: CGFloat = 0, alpha: CGFloat = 0
if color.getRed(&red, green: &green, blue: &blue, alpha: &alpha) {
    var colorAsUInt: UInt32 = 0
    colorAsUInt += UInt32(red * 255.0) << 16 +
        UInt32(green * 255.0) << 8 +
        UInt32(blue * 255.0)
    colorAsUInt == 0xCC6699 // true
}
For details, see e.g. the Language Guide - Advanced Operators, which contains, among other valuable things, an example of bit shifting with RGB triplets.
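Wrapped up for the SCLAlertView call above, a small sketch of a computed property (the name asUInt is mine, not an existing API) could look like this:
extension UIColor {
    // Packs the RGB components into a single UInt of the form 0xRRGGBB (nil if unreadable).
    // Rounding avoids off-by-one results from values like 0.1176 * 255 = 29.988.
    var asUInt: UInt? {
        var red: CGFloat = 0, green: CGFloat = 0, blue: CGFloat = 0, alpha: CGFloat = 0
        guard getRed(&red, green: &green, blue: &blue, alpha: &alpha) else { return nil }
        return (UInt((red * 255.0).rounded()) << 16)
             + (UInt((green * 255.0).rounded()) << 8)
             + UInt((blue * 255.0).rounded())
    }
}
With the color from the question, UIColor(red: 0.2, green: 0.4118, blue: 0.1176, alpha: 1.0).asUInt yields 0x33691E, matching the colorStyle value.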
I want to set the background color of a view through a parameter, so that when I want to change it I can. The value of the parameter comes from a database. My current code looks like this:
loadingView.backgroundColor = UIColor(red: 0.99, green: 0.75, blue: 0.14, alpha: 1.0)
I want to change the above code as follows:
loadingView.backgroundColor = UIColor(red: a, green: b, blue: c, alpha: d)
or
loadingView.backgroundColor = UIColor(hex)
where a, b, c, d and hex are variables that all have values.
If hex is an Int and not a String you can use this extension:
extension UIColor {
    convenience init(hex: Int, alpha: CGFloat) {
        let red = CGFloat((hex & 0xFF0000) >> 16) / 255.0
        let green = CGFloat((hex & 0xFF00) >> 8) / 255.0
        let blue = CGFloat(hex & 0xFF) / 255.0
        self.init(red: red, green: green, blue: blue, alpha: alpha)
    }
}
Used like this:
UIColor(hex: 0x00ff00, alpha: 1.0)
UIColor(hex: hex, alpha: 1.0)
extension String {
    subscript(range: ClosedRange<Int>) -> String {
        let start = index(startIndex, offsetBy: range.lowerBound)
        let end = index(startIndex, offsetBy: range.upperBound)
        return String(self[start...end])
    }
}

extension UIColor {
    convenience init(htmlColor: String, alpha: Double) {
        // Parse each two-character hex pair with strtoul (base 16)
        self.init(red: CGFloat(strtoul(htmlColor[0...1], nil, 16)) / 255.0,
                  green: CGFloat(strtoul(htmlColor[2...3], nil, 16)) / 255.0,
                  blue: CGFloat(strtoul(htmlColor[4...5], nil, 16)) / 255.0,
                  alpha: CGFloat(alpha))
    }
}
Testing
let greenColorHEX = "00ff00"
let greenColor = UIColor(htmlColor: greenColorHEX, alpha: 1)
let a: CGFloat = 1
let b: CGFloat = 1
let c: CGFloat = 0
let d: CGFloat = 1
let yellowColor = UIColor(red: a, green: b, blue: c, alpha: d)