Swift - using #define in Swift [duplicate]

This question already has answers here:
How can I use UIColorFromRGB in Swift?
(22 answers)
Closed 8 years ago.
I have a Common.h with the following defined:
// Get UIColor from Hex value
#define UIColorFromRGB(rgbValue) [UIColor colorWithRed:((float)((rgbValue & 0xFF0000) >> 16))/255.0 green:((float)((rgbValue & 0xFF00) >> 8))/255.0 blue:((float)(rgbValue & 0xFF))/255.0 alpha:1.0]
#define CellTextColor UIColorFromRGB((uint32_t) 0x217EB5)
I want to be able to use CellTextColor for the coloring in Swift, but Swift won't pick up the macros. It only works when I have a simple constant like #define value @"string".
Is there something else I need to do here?
I read the below links:
How to use Objective-C code with #define macros in Swift
https://developer.apple.com/library/prerelease/ios/documentation/Swift/Conceptual/BuildingCocoaApps/InteractingWithCAPIs.html

The specific answer is probably an extension on UIColor:
extension UIColor {
    class func fromRGB(_ rgb: UInt32) -> UIColor {
        return UIColor(
            red: CGFloat((rgb & 0xFF0000) >> 16) / 255.0,
            green: CGFloat((rgb & 0x00FF00) >> 8) / 255.0,
            blue: CGFloat(rgb & 0x0000FF) / 255.0,
            alpha: CGFloat(1.0)
        )
    }

    class var cellTextColor: UIColor {
        return UIColor.fromRGB(0x217eb5)
    }
}
Which allows you to use:
let color = UIColor.cellTextColor
The more general answer is that it depends entirely on what the macro is and how it is used: Swift imports simple constant-like macros, but never function-like ones.
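For reference, Swift does import simple constant macros from the bridging header automatically; it is only function-like macros such as UIColorFromRGB that never come across. A minimal sketch (the constant names here are illustrative, not from the original Common.h):

// Common.h (bridging header)
#define kCellTitle @"Title"    // simple constant: imported into Swift as a String
#define kCellHeight 44         // simple constant: imported into Swift as an Int32
// #define UIColorFromRGB(rgbValue) ...  // function-like: NOT imported

// Somewhere in Swift
let title = kCellTitle         // works
let height = kCellHeight       // works
// UIColorFromRGB(0x217EB5)    // error: unresolved identifier; reimplement it in Swift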

Related

Swift 5.4 hex to NSColor

I am developing a program for macOS.
I need to convert a hex color to NSColor.
I looked at the proposed solutions here:
Convert Hex Color Code to NSColor
How to convert hex to NSColor?
But none of them works correctly with Xcode 12.5.1.
At the moment I did this, it works correctly:
extension NSObject {
    func RGB(r: CGFloat, g: CGFloat, b: CGFloat, alpha: CGFloat? = 1) -> NSColor {
        return NSColor(red: r/255, green: g/255, blue: b/255, alpha: alpha!)
    }
}

let fillColor = RGB(r: 33, g: 150, b: 243)
Ideally without having to use Cocoa.
I would like a function like this: hexToNSColor("#2196f3")
Can you give me a hand?
You could try something like this:
EDIT: included toHex(alpha:), from code I probably got from the net somewhere many years ago.
EDIT 3, 4: included the case for #RRGGBBAA.
EDIT 5: stripping blank spaces in the hex string, so that NSColor(hex: " # 2196f380 ") works as well.
extension NSColor {
    convenience init(hex: String) {
        let trimHex = hex.trimmingCharacters(in: .whitespacesAndNewlines)
        let dropHash = String(trimHex.dropFirst()).trimmingCharacters(in: .whitespacesAndNewlines)
        let hexString = trimHex.starts(with: "#") ? dropHash : trimHex
        let ui64 = UInt64(hexString, radix: 16)
        let value = ui64 != nil ? Int(ui64!) : 0
        // #RRGGBB
        var components = (
            R: CGFloat((value >> 16) & 0xff) / 255,
            G: CGFloat((value >> 08) & 0xff) / 255,
            B: CGFloat((value >> 00) & 0xff) / 255,
            a: CGFloat(1)
        )
        if hexString.count == 8 {
            // #RRGGBBAA
            components = (
                R: CGFloat((value >> 24) & 0xff) / 255,
                G: CGFloat((value >> 16) & 0xff) / 255,
                B: CGFloat((value >> 08) & 0xff) / 255,
                a: CGFloat((value >> 00) & 0xff) / 255
            )
        }
        self.init(red: components.R, green: components.G, blue: components.B, alpha: components.a)
    }

    func toHex(alpha: Bool = false) -> String? {
        guard let components = cgColor.components, components.count >= 3 else {
            return nil
        }
        let r = Float(components[0])
        let g = Float(components[1])
        let b = Float(components[2])
        var a = Float(1.0)
        if components.count >= 4 {
            a = Float(components[3])
        }
        if alpha {
            return String(format: "%02lX%02lX%02lX%02lX", lroundf(r * 255), lroundf(g * 255), lroundf(b * 255), lroundf(a * 255))
        } else {
            return String(format: "%02lX%02lX%02lX", lroundf(r * 255), lroundf(g * 255), lroundf(b * 255))
        }
    }
}
let nscol = NSColor(hex: "#2196f3") // <-- with or without #
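The toHex(alpha:) side can be checked with a quick round trip; assuming the color lands in an RGB-compatible color space (which the initializer above should guarantee), something like this:

NSColor(hex: "#2196f3").toHex()              // "2196F3"
NSColor(hex: "2196f380").toHex(alpha: true)  // "2196F380"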
EDIT2:
You can do the same for UIColor, and for SwiftUI's Color (backed by UIColor or NSColor):
extension Color {
    public init(hex: String) {
        self.init(UIColor(hex: hex))
    }

    public func toHex(alpha: Bool = false) -> String? {
        UIColor(self).toHex(alpha: alpha)
    }
}
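On macOS the same wrapper works with NSColor(hex:) in place of UIColor(hex:). Either way, usage from SwiftUI then looks like this (a sketch, assuming the same hex initializer has been replicated on UIColor as the text suggests):

import SwiftUI

let tint = Color(hex: "#2196f3")
let roundTrip = tint.toHex(alpha: true)  // e.g. "2196F3FF"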
/* Usage of the hexString/hex initializers defined below:
// With hash
let color: NSColor = NSColor(hexString: "#ff8942")
// Without hash, with alpha
let secondColor: NSColor = NSColor(hexString: "ff8942", alpha: 0.5)
// Short handling
let shortColorWithHex: NSColor = NSColor(hexString: "fff")
// From a real hex value (an `Int`)
// With hash
let color: NSColor = NSColor(hex: 0xff8942)
// Without hash, with alpha
let secondColor: NSColor = NSColor(hex: 0xff8942, alpha: 0.5)
*/
#if os(iOS) || os(tvOS) || os(watchOS)
import UIKit
typealias SWColor = UIColor
#else
import Cocoa
typealias SWColor = NSColor
#endif
private extension Int64 {
    func duplicate4bits() -> Int64 {
        return (self << 4) + self
    }
}
/// An extension of UIColor (on iOS) or NSColor (on OSX) providing HEX color handling.
public extension SWColor {
    private convenience init?(hex3: Int64, alpha: Float) {
        self.init(red: CGFloat(((hex3 & 0xF00) >> 8).duplicate4bits()) / 255.0,
                  green: CGFloat(((hex3 & 0x0F0) >> 4).duplicate4bits()) / 255.0,
                  blue: CGFloat(((hex3 & 0x00F) >> 0).duplicate4bits()) / 255.0,
                  alpha: CGFloat(alpha))
    }

    private convenience init?(hex4: Int64, alpha: Float?) {
        self.init(red: CGFloat(((hex4 & 0xF000) >> 12).duplicate4bits()) / 255.0,
                  green: CGFloat(((hex4 & 0x0F00) >> 8).duplicate4bits()) / 255.0,
                  blue: CGFloat(((hex4 & 0x00F0) >> 4).duplicate4bits()) / 255.0,
                  alpha: alpha.map(CGFloat.init(_:)) ?? CGFloat(((hex4 & 0x000F) >> 0).duplicate4bits()) / 255.0)
    }

    private convenience init?(hex6: Int64, alpha: Float) {
        self.init(red: CGFloat((hex6 & 0xFF0000) >> 16) / 255.0,
                  green: CGFloat((hex6 & 0x00FF00) >> 8) / 255.0,
                  blue: CGFloat((hex6 & 0x0000FF) >> 0) / 255.0,
                  alpha: CGFloat(alpha))
    }

    private convenience init?(hex8: Int64, alpha: Float?) {
        self.init(red: CGFloat((hex8 & 0xFF000000) >> 24) / 255.0,
                  green: CGFloat((hex8 & 0x00FF0000) >> 16) / 255.0,
                  blue: CGFloat((hex8 & 0x0000FF00) >> 8) / 255.0,
                  alpha: alpha.map(CGFloat.init(_:)) ?? CGFloat((hex8 & 0x000000FF) >> 0) / 255.0)
    }

    /**
     Create a non-autoreleased color from the given hex string and alpha.
     - parameter hexString: The hex string, with or without the leading hash character.
     - parameter alpha: The alpha value, a floating-point value between 0 and 1.
     - returns: A color with the given hex string and alpha.
     */
    convenience init?(hexString: String, alpha: Float? = nil) {
        var hex = hexString
        // Strip the leading hash, if present
        if hex.hasPrefix("#") {
            hex = String(hex[hex.index(after: hex.startIndex)...])
        }
        guard let hexVal = Int64(hex, radix: 16) else {
            self.init()
            return nil
        }
        switch hex.count {
        case 3:
            self.init(hex3: hexVal, alpha: alpha ?? 1.0)
        case 4:
            self.init(hex4: hexVal, alpha: alpha)
        case 6:
            self.init(hex6: hexVal, alpha: alpha ?? 1.0)
        case 8:
            self.init(hex8: hexVal, alpha: alpha)
        default:
            // Note:
            // The Swift 1.1 compiler is currently unable to destroy partially initialized classes in all cases,
            // so it disallows formation of a situation where it would have to. We consider this a bug to be fixed
            // in future releases, not a feature. -- Apple Forum
            self.init()
            return nil
        }
    }

    /**
     Create a non-autoreleased color from the given hex value and alpha.
     - parameter hex: The hex value. For example: 0xff8942 (no quotation marks).
     - parameter alpha: The alpha value, a floating-point value between 0 and 1.
     - returns: A color with the given hex value and alpha.
     */
    convenience init?(hex: Int, alpha: Float = 1.0) {
        if (0x000000 ... 0xFFFFFF) ~= hex {
            self.init(hex6: Int64(hex), alpha: alpha)
        } else {
            self.init()
            return nil
        }
    }

    convenience init?(argbHex: Int) {
        if (0x00000000 ... 0xFFFFFFFF) ~= argbHex {
            let hex = Int64(argbHex)
            self.init(red: CGFloat((hex & 0x00FF0000) >> 16) / 255.0,
                      green: CGFloat((hex & 0x0000FF00) >> 8) / 255.0,
                      blue: CGFloat((hex & 0x000000FF) >> 0) / 255.0,
                      alpha: CGFloat((hex & 0xFF000000) >> 24) / 255.0)
        } else {
            self.init()
            return nil
        }
    }

    convenience init?(argbHexString: String) {
        var hex = argbHexString
        // Strip the leading hash, if present
        if hex.hasPrefix("#") {
            hex = String(hex[hex.index(after: hex.startIndex)...])
        }
        guard hex.count == 8, let hexVal = Int64(hex, radix: 16) else {
            self.init()
            return nil
        }
        self.init(red: CGFloat((hexVal & 0x00FF0000) >> 16) / 255.0,
                  green: CGFloat((hexVal & 0x0000FF00) >> 8) / 255.0,
                  blue: CGFloat((hexVal & 0x000000FF) >> 0) / 255.0,
                  alpha: CGFloat((hexVal & 0xFF000000) >> 24) / 255.0)
    }
}

How to convert value of type UIColor to UInt in Swift [duplicate]

This question already has answers here:
How to get the RGB Code (INT) from an UIColor in Swift [duplicate]
(7 answers)
Closed 6 years ago.
I got this UIColor:
UIColor(red: 0.2, green: 0.4118, blue: 0.1176, alpha: 1.0)
And I need to convert it to a UInt. How can I do that?
EDIT:
func showEmailMessage(advice: String) {
    _ = SCLAlertView().showSuccess("Congratulation", subTitle: advice, closeButtonTitle: "Ok", duration: 10, colorStyle: 0x33691e, colorTextButton: 0xFFFFFF)
}
The colorStyle parameter wants a UInt.
You can use the UIColor.getRed(...) method to extract the color components as CGFloat values, and then shift them into the proper bit positions of a UInt32 variable.
// Example: use color triplet CC6699 "=" {204, 102, 153} (RGB triplet)
let color = UIColor(red: 204.0/255.0, green: 102.0/255.0, blue: 153.0/255.0, alpha: 1.0)

// Read the components into CGFloats, then shift them into the proper bit positions of a UInt32
var red: CGFloat = 0, green: CGFloat = 0, blue: CGFloat = 0, alpha: CGFloat = 0
if color.getRed(&red, green: &green, blue: &blue, alpha: &alpha) {
    var colorAsUInt: UInt32 = 0
    colorAsUInt += UInt32(red * 255.0) << 16 +
                   UInt32(green * 255.0) << 8 +
                   UInt32(blue * 255.0)
    print(colorAsUInt == 0xCC6699) // true
}
For details, see e.g. the Language Guide - Advanced Operators, which contains, among other things, an example of bit shifting with RGB triplets.
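If you need this conversion in more than one place, the same logic wraps up naturally as a UIColor extension. A minimal sketch (the property name rgbValue is my own, not a UIKit API):

import UIKit

extension UIColor {
    /// Packs the RGB components into a 0xRRGGBB UInt32; nil if the color has no RGB representation.
    var rgbValue: UInt32? {
        var r: CGFloat = 0, g: CGFloat = 0, b: CGFloat = 0, a: CGFloat = 0
        guard getRed(&r, green: &g, blue: &b, alpha: &a) else { return nil }
        return UInt32((r * 255.0).rounded()) << 16
             | UInt32((g * 255.0).rounded()) << 8
             | UInt32((b * 255.0).rounded())
    }
}

// UIColor(red: 204/255, green: 102/255, blue: 153/255, alpha: 1).rgbValue  // 0xCC6699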

Cannot convert value of type 'Int' to expected argument type 'ClassName'

struct ColorPalette {
    var DefaultBarTintColor: UIColor = UIColorFromRGB(0x75b5d4)

    func UIColorFromRGB(rgbValue: UInt) -> UIColor {
        return UIColor(
            red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
            green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
            blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
            alpha: CGFloat(1.0)
        )
    }
}
error: Cannot convert value of type 'Int' to expected argument type 'MyClassName'
If I do
view.backColor = UIColorFromRGB(0x75b5d4)
it works. But I want to do it like this:
view.backColor = ColorPalette.DefaultBarTintColor
I don't know how to fix it.
To call a function in a property initializer inside a struct, you need to use the static keyword. Here is the modified version of your struct:
struct ColorPalette {
    static var DefaultBarTintColor: UIColor = ColorPalette.UIColorFromRGB(0x75b5d4)

    static func UIColorFromRGB(rgbValue: UInt) -> UIColor {
        return UIColor(
            red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
            green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
            blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
            alpha: CGFloat(1.0)
        )
    }
}
Now you can call it like this:
view.backColor = ColorPalette.DefaultBarTintColor
Alternatively, if you want to avoid the static keyword, you need to create an instance of your struct:
let myColorPallete = ColorPalette()
then you can access it like this:
viewNoEvents.backgroundColor = myColorPallete.DefaultBarTintColor
Hope this helps!
Instead of initializing the property at its declaration, you can use an initializer, as below:
struct ColorPalette {
    var DefaultBarTintColor: UIColor!

    func UIColorFromRGB(rgbValue: Int) -> UIColor {
        return UIColor(
            red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
            green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
            blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
            alpha: CGFloat(1.0)
        )
    }

    init(rgbValue: Int) {
        self.DefaultBarTintColor = UIColorFromRGB(rgbValue: rgbValue)
    }
}
And then create instance by passing hex value as below:
var color = ColorPalette(rgbValue: 0x75B5D4)
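A third variant, for what it's worth, is a computed static property: nothing is stored, no instance is needed, and the color is built on access. A sketch along the same lines as the static answer above:

import UIKit

struct ColorPalette {
    static var defaultBarTintColor: UIColor {
        return UIColorFromRGB(rgbValue: 0x75b5d4)
    }

    static func UIColorFromRGB(rgbValue: UInt) -> UIColor {
        return UIColor(
            red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
            green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
            blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
            alpha: 1.0
        )
    }
}

// view.backColor = ColorPalette.defaultBarTintColor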

Hex code for clear color

My question may be silly: is there a hex code for [UIColor clearColor]? If so, what is it? Thanks in advance.
The RGBA hex code for clearColor is NNNNNN00, where N may be any hex digit (0-F).
UIColor *clearColor = [UIColor clearColor];
CGFloat red = 0;
CGFloat green = 0;
CGFloat blue = 0;
CGFloat alpha = 0;
[clearColor getRed:&red green:&green blue:&blue alpha:&alpha];
NSLog(@"red: %.3f, green: %.3f, blue: %.3f, alpha: %.3f",
      red, green, blue, alpha);
NSLog(@"red: 0x%02x, green: 0x%02x, blue: 0x%02x, alpha: 0x%02x",
      (int)(red*255.0), (int)(green*255.0), (int)(blue*255.0), (int)(alpha*255.0));
NSLog output:
red: 0.000, green: 0.000, blue: 0.000, alpha: 0.000
red: 0x00, green: 0x00, blue: 0x00, alpha: 0x00
As far as I know, hexadecimal color codes are made up of RRGGBB values, whereas [UIColor clearColor] depends entirely on its alpha component. So the answer is "nope": there is no hex code for clearColor.
I like using hex colours in my configuration, so I modified a commonly used macro to split an RGBA hex value (e.g. 0x00000055, semi-transparent black) into a UIColor with alpha:
#define UIColorFromRGBA(rgbaValue) [UIColor \
colorWithRed:((float)((rgbaValue & 0xFF000000) >> 24))/255.0 \
green:((float)((rgbaValue & 0xFF0000) >> 16))/255.0 \
blue:((float)((rgbaValue & 0xFF00) >> 8))/255.0 \
alpha:((float)(rgbaValue & 0xFF))/255.0 ]
Example usage:
cell.backgroundColor = UIColorFromRGBA(0x00000055);
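The Swift translation of that macro is direct; a sketch (the function name mirrors the macro but is not a UIKit API):

import UIKit

func UIColorFromRGBA(_ rgbaValue: UInt32) -> UIColor {
    return UIColor(
        red: CGFloat((rgbaValue & 0xFF000000) >> 24) / 255.0,
        green: CGFloat((rgbaValue & 0x00FF0000) >> 16) / 255.0,
        blue: CGFloat((rgbaValue & 0x0000FF00) >> 8) / 255.0,
        alpha: CGFloat(rgbaValue & 0x000000FF) / 255.0
    )
}

// cell.backgroundColor = UIColorFromRGBA(0x00000055)  // semi-transparent black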

How to display image view background color with hash value

How do I set an image view's background color from a hash (hex) value? I need to set the background color to #028002.
Can anyone please help me? Thanks in advance.
I've used the following UIColor category:
@implementation UIColor (Tools)

+ (UIColor *)colorWithHex:(NSInteger)hex alpha:(CGFloat)alpha {
    return [UIColor colorWithRed:((float)((hex & 0xFF0000) >> 16))/255.0
                           green:((float)((hex & 0xFF00) >> 8))/255.0
                            blue:((float)(hex & 0xFF))/255.0
                           alpha:alpha];
}

@end

// Example of usage:
// backgroundView.backgroundColor = [UIColor colorWithHex:0xe1e1e1 alpha:1.0];
There is no need to parse strings here.
UIColor does not support hex value directly. You need to convert that yourself. Something like this:
CGFloat r = (CGFloat)0x02 / 0xFF;
CGFloat g = (CGFloat)0x80 / 0xFF;
CGFloat b = (CGFloat)0x02 / 0xFF;
UIColor *color = [UIColor colorWithRed:r green:g blue:b alpha:1.0];
If you want to convert a hash value to UIColor, this may help you:
http://imthi.com/blog/programming/iphone-sdk-convert-hex-color-string-to-uicolor.php
In Swift you can use the method below:
func hexStringToUIColor(hex: String) -> UIColor {
    var cString: String = hex.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
    if cString.hasPrefix("#") {
        cString.remove(at: cString.startIndex)
    }
    if cString.count != 6 {
        return UIColor.gray
    }
    var rgbValue: UInt32 = 0
    Scanner(string: cString).scanHexInt32(&rgbValue)
    return UIColor(
        red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
        green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
        blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
        alpha: CGFloat(1.0)
    )
}
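Tying this back to the original question, setting the image view's background color then looks like this (imageView being whatever UIImageView you are styling):

imageView.backgroundColor = hexStringToUIColor(hex: "#028002")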