I have a string variable:
var str = "239A23F"
How do I convert this hexadecimal string to an integer?
str.toInt() does not work.
You can use NSScanner() from the Foundation framework:
let scanner = NSScanner(string: str)
var result: UInt32 = 0
if scanner.scanHexInt(&result) {
    println(result) // 37331519
}
Or the BSD library function strtoul()
let num = strtoul(str, nil, 16)
println(num) // 37331519
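On newer SDKs (macOS 10.15 / iOS 13 and later) the inout-based scanHexInt is deprecated in favor of failable scan methods. A minimal sketch, assuming the Scanner.scanUInt64(representation:) API from those SDKs:
import Foundation

let scanner = Scanner(string: "239A23F")
// Returns nil if no hexadecimal integer can be scanned
if let value = scanner.scanUInt64(representation: .hexadecimal) {
    print(value) // 37331519
}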
As of Swift 2 (Xcode 7), all integer types have an
public init?(_ text: String, radix: Int = default)
initializer, so that a pure Swift solution is available:
let str = "239A23F"
let num = Int(str, radix: 16)
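The initializer is failable and returns nil for invalid input, so you would typically unwrap the result:
let str = "239A23F"
if let num = Int(str, radix: 16) {
    print(num) // 37331519
}
Int("XYZ", radix: 16) // nil: not a valid hexadecimal string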
How can I make the string "\u{3A9}" in code? Here is what I tried, but it did not work!
let omegaHexadecimal: String = "3A9"
let omega = "\u{" + omegaHexadecimal + "}"
Or:
let omegaHexadecimal: String = "3A9"
let omega = "\u{\(omegaHexadecimal)}"
Update:
extension StringProtocol where Self: RangeReplaceableCollection {
    private var decodingUnicodeCharacters: String { applyingTransform(.init("Hex-Any"), reverse: false) ?? "" }
    func stringToUniCodeHexConvertor(upTo length: Int = 4, using character: Character = "0") -> String {
        return ("\\u" + repeatElement(character, count: Swift.max(0, length - count)) + self).decodingUnicodeCharacters
    }
}
let omegaHexadecimal: String = "3A9"
let omega = omegaHexadecimal.stringToUniCodeHexConvertor()
print(omega) // "Ω"
You can pad your string up to 4 hex digits (2 bytes, a UInt16), add the \u prefix to get \uXXXX, and use a string transform to convert the Unicode hex value to the corresponding character:
extension StringProtocol where Self: RangeReplaceableCollection {
    func paddingToLeft(upTo length: Int = 4, using character: Character = "0") -> Self {
        repeatElement(character, count: Swift.max(0, length - count)) + self
    }
    var decodingUnicodeCharacters: String { applyingTransform(.init("Hex-Any"), reverse: false) ?? "" }
}
let omegaHexadecimal: String = "3A9"
let omega = "\\u" + omegaHexadecimal.paddingToLeft() // "\\u03A9"
omega.decodingUnicodeCharacters // "Ω"
Why so complicated? Work with numbers directly:
let omega = UnicodeScalar(0x3A9)!
print(String(omega)) // Ω
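If the code point arrives as a hex string, you can combine the two steps: parse the string with UInt32(_:radix:) and then build the scalar. A short sketch:
let omegaHexadecimal = "3A9"
// UInt32(_:radix:) rejects malformed hex; UnicodeScalar rejects invalid code points
if let code = UInt32(omegaHexadecimal, radix: 16),
   let scalar = UnicodeScalar(code) {
    print(String(scalar)) // Ω
}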
So I have a string like this, "0b00001" — is there a way to turn that into an Int? I tried something like this,
let string = "0b0001"
let int = Int(string)
but since there is a "b" in the string to specify that it is binary, it sets int to nil.
Is there any way to turn the contents of the string into an Int?
Yes. You can use the BinaryInteger generic initializer, but you need to drop the "0b" prefix:
init?<S>(_ text: S, radix: Int = 10) where S : StringProtocol
let str = "0b0101"
let int = Int(str.dropFirst(2), radix: 2) // 5
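If the prefix can vary, you can map it to the matching radix before parsing. A small sketch; the parseIntLiteral helper is hypothetical, not a standard API:
// Hypothetical helper: maps "0b"/"0o"/"0x" prefixes to the matching radix.
func parseIntLiteral(_ text: String) -> Int? {
    let prefixes: [String: Int] = ["0b": 2, "0o": 8, "0x": 16]
    for (prefix, radix) in prefixes where text.hasPrefix(prefix) {
        return Int(text.dropFirst(2), radix: radix)
    }
    return Int(text) // no prefix: plain decimal
}

parseIntLiteral("0b0101") // 5
parseIntLiteral("0x1F") // 31
parseIntLiteral("42") // 42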
We can use a String format specifier to convert an integer value or a long value to hexadecimal notation.
Int example:
print(String(format: "%x", 1111))
// result: 457
Long example:
print(String(format: "%lx", 11111111111111))
// result: a1b01d4b1c7
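The same format syntax also supports uppercase digits and zero-padding, for example:
print(String(format: "%X", 44031))
// result: ABFF (uppercase hex digits)
print(String(format: "%08x", 1111))
// result: 00000457 (zero-padded to 8 digits)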
But what if we try to convert a very large decimal that is larger than UInt64.max (18446744073709551615)?
What is the right way to convert in this case?
One possible solution is to use NSDecimalNumber to hold the large value, but it has no built-in way to convert the number into a string in any base other than 10.
The following extension to NSDecimalNumber converts the number into any base from 2 to 16. It also includes a convenience init that takes a string in a given base.
extension NSDecimalNumber {
    convenience init(string: String, base: Int) {
        guard base >= 2 && base <= 16 else { fatalError("Invalid base") }
        let digits = "0123456789ABCDEF"
        let baseNum = NSDecimalNumber(value: base)
        var res = NSDecimalNumber(value: 0)
        for ch in string {
            let index = digits.firstIndex(of: ch)!
            let digit = digits.distance(from: digits.startIndex, to: index)
            res = res.multiplying(by: baseNum).adding(NSDecimalNumber(value: digit))
        }
        self.init(decimal: res.decimalValue)
    }

    func toBase(_ base: Int) -> String {
        guard base >= 2 && base <= 16 else { fatalError("Invalid base") }
        // Support higher bases by adding more digits
        let digits = "0123456789ABCDEF"
        let rounding = NSDecimalNumberHandler(roundingMode: .down, scale: 0, raiseOnExactness: false, raiseOnOverflow: false, raiseOnUnderflow: false, raiseOnDivideByZero: false)
        let baseNum = NSDecimalNumber(value: base)
        var res = ""
        var val = self
        while val.compare(0) == .orderedDescending {
            let next = val.dividing(by: baseNum, withBehavior: rounding)
            let round = next.multiplying(by: baseNum)
            let diff = val.subtracting(round)
            let digit = diff.intValue
            let index = digits.index(digits.startIndex, offsetBy: digit)
            res.insert(digits[index], at: res.startIndex)
            val = next
        }
        return res.isEmpty ? "0" : res // zero would otherwise produce an empty string
    }
}
Test:
let bigNum = NSDecimalNumber(string: "18446744073709551615")
print(bigNum.toBase(16))
print(bigNum.toBase(10)) // or just print(bigNum)
print(NSDecimalNumber(string: "B7", base: 16))
print(NSDecimalNumber(string: NSDecimalNumber(string: "18446744073709551615").toBase(16), base: 16))
Output:
FFFFFFFFFFFFFFFF
18446744073709551615
183
18446744073709551615
Suppose I am given a string like this:
D7C17A4F
How do I convert each individual character to a hex value?
So D should be 0xD, 7 should be 0x7…
Right now, I have each individual character represented as its ASCII value. D is 68, 7 is 55. I'm trying to pack those two values into one byte. For example: D7 becomes 0xD7 and C1 becomes 0xC1. I can't do that using the ASCII decimal values, though.
A possible solution:
let string = "D7C17A4F"
let chars = Array(string)
let numbers = map(stride(from: 0, to: chars.count, by: 2)) {
    strtoul(String(chars[$0 ..< $0+2]), nil, 16)
}
Using the approach from https://stackoverflow.com/a/29306523/1187415,
the string is split into substrings of two characters.
Each substring is interpreted as a sequence of digits
in base 16, and converted to a number with strtoul().
Verify the result:
println(numbers)
// [215, 193, 122, 79]
println(map(numbers, { String(format: "%02X", $0) } ))
// [D7, C1, 7A, 4F]
Update for Swift 2 (Xcode 7):
let string = "D7C17A4F"
let chars = Array(string.characters)
let numbers = 0.stride(to: chars.count, by: 2).map {
    UInt8(String(chars[$0 ..< $0+2]), radix: 16) ?? 0
}
print(numbers)
or
let string = "D7C17A4F"
var numbers = [UInt8]()
var from = string.startIndex
while from != string.endIndex {
    let to = from.advancedBy(2, limit: string.endIndex)
    numbers.append(UInt8(string[from ..< to], radix: 16) ?? 0)
    from = to
}
print(numbers)
The second solution looks a bit more complicated but has the small
advantage that no additional chars array is needed.
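For current toolchains, here is a minimal Swift 5 sketch of the same index-based approach (String.Index methods replaced advancedBy long ago):
let string = "D7C17A4F"
var numbers = [UInt8]()
var from = string.startIndex
while from != string.endIndex {
    // index(_:offsetBy:limitedBy:) returns nil past the end, so clamp to endIndex
    let to = string.index(from, offsetBy: 2, limitedBy: string.endIndex) ?? string.endIndex
    numbers.append(UInt8(string[from ..< to], radix: 16) ?? 0)
    from = to
}
print(numbers) // [215, 193, 122, 79]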
Swift 3 version, modified from @Martin R's answer. This variant also accepts an incoming string of odd length.
let string = "D7C17A4F"
let chars = Array(string.characters)
let numbers = stride(from: 0, to: chars.count, by: 2).map {
    strtoul(String(chars[$0 ..< min($0 + 2, chars.count)]), nil, 16)
}
Use chunks(ofCount:) from the Swift Algorithms package!
import Algorithms

"D7C17A4F"
    .chunks(ofCount: 2)
    .map { UInt8($0, radix: 0x10)! }
My variation of @Martin R's answer:
extension String {
    func hexToByteArray() -> [UInt8] {
        let byteCount = self.utf8.count / 2
        var array = [UInt8](count: byteCount, repeatedValue: 0)
        var from = self.startIndex
        for i in 0..<byteCount {
            let to = from.successor()
            let sub = self.substringWithRange(from...to)
            array[i] = UInt8(sub, radix: 16) ?? 0
            from = to.successor()
        }
        return array
    }
}
Here is a more generic, "pure Swift" approach (no Foundation required :-)):
extension UnsignedInteger {
    var hex: String {
        var str = String(self, radix: 16, uppercase: true)
        while str.characters.count < 2 * MemoryLayout<Self>.size {
            str.insert("0", at: str.startIndex)
        }
        return str
    }
}

extension Array where Element: UnsignedInteger {
    var hex: String {
        var str = ""
        self.forEach { (u) in
            str.append(u.hex)
        }
        return str
    }
}

let str = [UInt8(1),22,63,41].hex // "01163F29"
let str2 = [UInt(1),22,63,41].hex // "00000000000000010000000000000016000000000000003F0000000000000029"

extension String {
    func toUnsignedInteger<T: UnsignedInteger>() -> [T]? {
        var ret = [T]()
        let nibbles = MemoryLayout<T>.size * 2
        for i in stride(from: 0, to: characters.count, by: nibbles) {
            let start = self.index(startIndex, offsetBy: i)
            guard let end = self.index(start, offsetBy: nibbles, limitedBy: endIndex),
                let ui = UIntMax(self[start..<end], radix: 16) else { return nil }
            ret.append(T(ui))
        }
        return ret
    }
}

let u0: [UInt8]? = str.toUnsignedInteger() // [1, 22, 63, 41]
let u1 = "F2345f".toUnsignedInteger() as [UInt8]? // [242, 52, 95]
let u2 = "12345f".toUnsignedInteger() as [UInt16]? // nil
let u3 = "12345g".toUnsignedInteger() as [UInt8]? // nil
let u4 = "12345f".toUnsignedInteger() as [UInt]? // nil
let u5 = "12345678".toUnsignedInteger() as [UInt8]? // [18, 52, 86, 120]
let u6 = "12345678".toUnsignedInteger() as [UInt16]? // [4660, 22136]
let u7 = "1234567812345678".toUnsignedInteger() as [UInt]? // [1311768465173141112]
It is very easy to do the same for SignedInteger as well, but a better approach is to map the results to the signed type:
let u8 = u1?.map { Int8(bitPattern: $0) } // [-14, 52, 95]
How can I convert Int to UInt8 in Swift?
For example, I want to convert the number 22 to 0b00010110:
var decimal = 22
var binary: UInt8 = ??? // What should I write here?
You can convert the decimal value to a human-readable binary representation using the String initializer that takes a radix parameter:
let num = 22
let str = String(num, radix: 2)
print(str) // prints "10110"
If you wanted to, you could also pad it with any number of zeroes pretty easily:
Swift 5
func pad(string: String, toSize: Int) -> String {
    var padded = string
    for _ in 0..<(toSize - string.count) {
        padded = "0" + padded
    }
    return padded
}
let num = 22
let str = String(num, radix: 2)
print(str) // 10110
pad(string: str, toSize: 8) // 00010110
Swift 5.1 / Xcode 11
Thanks Gustavo Seidler.
My version of his solution adds spaces for readability.
extension BinaryInteger {
    var binaryDescription: String {
        var binaryString = ""
        var internalNumber = self
        var counter = 0
        for _ in (1...self.bitWidth) {
            binaryString.insert(contentsOf: "\(internalNumber & 1)", at: binaryString.startIndex)
            internalNumber >>= 1
            counter += 1
            if counter % 4 == 0 && counter != self.bitWidth { // skip the last group to avoid a leading space
                binaryString.insert(contentsOf: " ", at: binaryString.startIndex)
            }
        }
        return binaryString
    }
}
Examples:
UInt8(9).binaryDescription // "0000 1001"
Int8(5).binaryDescription // "0000 0101"
UInt16(1945).binaryDescription // "0000 0111 1001 1001"
Int16(14).binaryDescription // "0000 0000 0000 1110"
Int32(6).binaryDescription // "0000 0000 0000 0000 0000 0000 0000 0110"
UInt32(2018).binaryDescription // "0000 0000 0000 0000 0000 0111 1110 0010"
I modified someone's version for Swift 3.0 and used the correct initializer for creating a string with repeated values:
extension String {
    func pad(with character: String, toLength length: Int) -> String {
        let padCount = length - self.characters.count
        guard padCount > 0 else { return self }
        return String(repeating: character, count: padCount) + self
    }
}
String(37, radix: 2).pad(with: "0", toLength: 8) // "00100101"
Since none of the solutions handle negative numbers, I came up with a simple solution that basically reads the number's internal representation and pads it automatically to the width of its type. This should work on all BinaryInteger types.
extension BinaryInteger {
    var binaryDescription: String {
        var binaryString = ""
        var internalNumber = self
        for _ in (1...self.bitWidth) {
            binaryString.insert(contentsOf: "\(internalNumber & 1)", at: binaryString.startIndex)
            internalNumber >>= 1
        }
        return "0b" + binaryString
    }
}
Examples:
UInt8(22).binaryDescription // "0b00010110"
Int8(60).binaryDescription // "0b00111100"
Int8(-60).binaryDescription // "0b11000100"
Int16(255).binaryDescription // "0b0000000011111111"
Int16(-255).binaryDescription // "0b1111111100000001"
I went through a lot of answers on this post, but I wonder why no one has mentioned the leadingZeroBitCount API on FixedWidthInteger.
It returns the number of leading zero bits in the integer's binary representation, e.g.:
UInt(4).leadingZeroBitCount // 61
UInt16(4).leadingZeroBitCount // 13
Swift version: 4.1
USAGE
let strFive = String.binaryRepresentation(of: UInt8(5))
print(strFive) // Prints: 00000101
UNDER THE HOOD
extension String {
    static func binaryRepresentation<F: FixedWidthInteger>(of val: F) -> String {
        let binaryString = String(val, radix: 2)
        if val.leadingZeroBitCount > 0 {
            return String(repeating: "0", count: val.leadingZeroBitCount) + binaryString
        }
        return binaryString
    }
}
I agree with the others, although the for-loop seems redundant for repeating a character.
We can simply use the following String initializer:
init(count count: Int, repeatedValue c: Character)
Usage example:
let string = String(count: 5, repeatedValue: Character("0")) // "00000"
Here is a full example:
let someBits: UInt8 = 0b00001110
let str = String(someBits, radix: 2) // binary base
let padd = String(count: (8 - str.characters.count), repeatedValue: Character("0")) // repeat a character
print(padd + str)
If you want binary to have the value of 22, just assign it that: binary = 22, or you could write it as binary = 0b00010110; the two statements are equivalent.
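In other words, the binary literal only affects how the source code is written, not what is stored; a quick check:
let a: UInt8 = 22
let b: UInt8 = 0b00010110
a == b // true: both store the same bit pattern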
Here's how I would do it:
extension String {
    public func pad(with padding: Character, toLength length: Int) -> String {
        let paddingWidth = length - self.characters.count
        guard 0 < paddingWidth else { return self }
        return String(repeating: padding, count: paddingWidth) + self
    }
}
String(0b1010, radix: 2).pad(with: "0", toLength: 8) //00001010
So I had this come up recently. The other generic solutions didn't work for me, due to various issues. Anyway, here's my solution (Swift 4):
extension String {
    init<B: FixedWidthInteger>(fullBinary value: B) {
        self = value.words.reduce(into: "") {
            $0.append(contentsOf: repeatElement("0", count: $1.leadingZeroBitCount))
            $0.append(String($1, radix: 2))
        }
    }
}
Tests:
// result: 0000000000000000000000000000000000000000000000000000000000001001
String(fullBinary: 9)
// result: 1111111111111111111111111111111111111111111111111111111100000000
String(fullBinary: -256)
// result: 1111111111111111111111111111111111111111111111111101100011110001
String(fullBinary: -9999)
// result: 0000000000000000000000000000000000000000000000000010011100001111
String(fullBinary: 9999)
// result: 1100011000000000000000000000000000000000000011110110100110110101
String(fullBinary: 14267403619510741429 as UInt)
Swift 4.1
extension String {
    public func pad(with padding: Character, toLength length: Int) -> String {
        let paddingWidth = length - self.count
        guard 0 < paddingWidth else { return self }
        return String(repeating: padding, count: paddingWidth) + self
    }
}

extension UInt8 {
    public func toBits() -> String {
        let a = String(self, radix: 2)
        let b = a.pad(with: "0", toLength: 8)
        return b
    }
}

func showBits(_ list: [UInt8]) {
    for num in list {
        showBits(num)
    }
}

func showBits(_ num: UInt8) {
    // print(num, String(num, radix: 2))
    print("\(num) \t" + num.toBits())
}

let initialBits: UInt8 = 0b00001111
let invertedBits = ~initialBits
showBits([initialBits, invertedBits])
Result:
15 00001111
240 11110000
There is no difference between binary and decimal numeral systems when you're working with variables; it only matters when you want to visualize a value, or when you convert between types that can hold different numbers of bits.
In your case it is enough to write:
var decimal = 22
var binary = UInt8(decimal)
But this will crash (an overflow happens) if decimal holds a value greater than 255, because that is the maximum value a UInt8 can hold.
Depending on what you want to achieve, you can write:
var decimal = 261 // 0b100000101
var binary = UInt8(truncatingBitPattern: decimal) // 0b00000101
Here you'll get 5 as a result, because this initializer keeps only the least significant bits that fit and truncates the rest.
The second option is:
var decimal = 256 // 0b100000000
var binary = UInt8(exactly: decimal) // nil
This initializer returns nil instead of crashing if an overflow happens.
P.S. If you want to see a binary string representation, use
String(decimal, radix: 2)
String(binary, radix: 2)
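A quick round trip for illustration:
let decimal = 22
let bits = String(decimal, radix: 2) // "10110"
let back = Int(bits, radix: 2) // Optional(22)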
I modified your version for Swift 2.0 (characters.count on strings) and added a length check:
extension String {
    func pad(length: Int) -> String {
        let diff = length - self.characters.count
        if diff > 0 {
            var padded = self
            for _ in 0..<diff {
                padded = "0" + padded
            }
            return padded
        } else {
            return self
        }
    }
}
Most answers here forget to account for 0 and output a representation that is too long.
Based on the answer by @karwag, I present:
extension FixedWidthInteger {
    var binaryStringRepresentation: String {
        words.reduce(into: "") {
            $0.append(contentsOf: repeatElement("0", count: $1.leadingZeroBitCount))
            if $1 != 0 {
                $0.append(String($1, radix: 2))
            }
        }
    }
}
It's a bit overcomplicated, but very fast. It separates every 4 bits and leaves no trailing whitespace in the string.
extension BinaryInteger {
    var binaryDescription: String {
        var string = ""
        var num = self
        let range: UInt64
        switch self.bitWidth {
        case 8: range = 0x80
        case 16: range = 0x8000
        case 32: range = 0x80000000
        case 64: range = 0x8000000000000000
        default: range = 0x0
        }
        if Self.isSigned {
            let mask = Self(range / 2)
            let last = num & 1
            num >>= 1
            for i in 1...self.bitWidth-1 {
                string.append("\(num & mask == mask ? 1 : 0)")
                num <<= 1
                if i % 4 == 0 { string.append(" ") }
            }
            string.append("\(last)")
        } else { // Unsigned
            let mask = Self(range)
            for i in 1...self.bitWidth {
                string.append("\(num & mask == mask ? 1 : 0)")
                num <<= 1
                if i % 4 == 0 { string.append(" ") }
            }
            string = String(string.dropLast())
        }
        return string
    }
}
Examples:
UInt8(245).binaryDescription // 1111 0101
Int8(108).binaryDescription // 0110 1100