How to fix the "Integer literal '2147483648' overflows when stored into 'Int'" error? - swift

let stringData = "84121516" // this is 4 bytes data
let value = self.checkHexToInt(stringData: stringData)
func checkHexToInt(stringData: String) -> Int? {
guard let num = Int(stringData, radix: 16) else {
return nil
}
return Int(num)
}
// values is 2215777558 But I need most significant bit only
let checkEngineLightOn = ((value! & 0x80000000) > 0);
When I am doing this I got the exception saying "Integer literal '2147483648' overflows when stored into 'Int'"
When I do this I am expecting to get either true or false. Or is any other to get most significant bit out of Int Value?

As #OOPer noted in the comments, on a 32-bit system Int is 32 bits and your value is larger than Int32.max. Since you are decoding 4 bytes you can use UInt32:
func checkHexToUInt32(stringData: String) -> UInt32? {
    return UInt32(stringData, radix: 16)
}

let stringData = "84121516" // this is 4 bytes data
let value = self.checkHexToUInt32(stringData: stringData)
let checkEngineLightOn = ((value! & 0x80000000) > 0)
Note: UInt32(_:radix:) returns a UInt32? which is nil if the conversion fails, so there is no need for the guard and return nil; just return the result of the conversion.
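For completeness, here is a minimal sketch of the same check without force-unwrapping (using the question's "84121516" value for illustration):
let hexString = "84121516"
if let value = UInt32(hexString, radix: 16) {
    let checkEngineLightOn = (value & 0x80000000) != 0
    print(checkEngineLightOn) // true: the most significant bit of 0x84121516 is set
} else {
    print("not a valid 32-bit hex string")
}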

Related

Decode nsData to String Array

I want to decode my nsData to a String Array. I have this code right now:
func nsDataToStringArray(data: NSData) -> [String] {
    var decodedStrings = [String]()
    var stringTerminatorPositions = [Int]()
    var currentPosition = 0
    data.enumerateBytes() { buffer, range, stop in
        let bytes = UnsafePointer<UInt8>(buffer)
        for i in 0 ..< range.length {
            if bytes[i] == 0 {
                stringTerminatorPositions.append(currentPosition)
            }
            currentPosition += 1
        }
    }
    var stringStartPosition = 0
    for stringTerminatorPosition in stringTerminatorPositions {
        let encodedString = data.subdata(with: NSMakeRange(stringStartPosition, stringTerminatorPosition - stringStartPosition))
        let decodedString = NSString(data: encodedString, encoding: String.Encoding.utf8.rawValue)! as String
        decodedStrings.append(decodedString)
        stringStartPosition = stringTerminatorPosition + 1
    }
    return decodedStrings
}
But I get an error on this line: let bytes = UnsafePointer<UInt8>(buffer)
Cannot invoke initializer for type 'UnsafePointer' with an argument list of type '(UnsafeRawPointer)'
Do I need to convert the buffer to a UnsafePointer? If so, how can I do that?
buffer in the enumerateBytes() closure is a UnsafeRawPointer
and you have to "rebind" it to an UInt8 pointer in Swift 3:
// let bytes = UnsafePointer<UInt8>(buffer)
let bytes = buffer.assumingMemoryBound(to: UInt8.self)
But why so complicated? You can achieve the same result with
func nsDataToStringArray(nsData: NSData) -> [String] {
    let data = nsData as Data
    return data.split(separator: 0).flatMap { String(bytes: $0, encoding: .utf8) }
}
How does this work?
Data is a Sequence of UInt8, therefore split(separator: 0) can be called on it, returning an array of "data slices" (which are views into the source data, not copies).
Each "data slice" is again a Sequence of UInt8, from which a String can be created with String(bytes: $0, encoding: .utf8). This is a failable initializer (because the data may be invalid UTF-8).
flatMap { ... } returns an array with all non-nil results, i.e. an array with all strings which could be created from valid UTF-8 code sequences between zero bytes.
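A quick usage sketch of the function above (the sample bytes are made up for illustration: two zero-terminated strings packed into one NSData, with Foundation imported):
let bytes: [UInt8] = [72, 105, 0, 66, 121, 101, 0]   // "Hi\0Bye\0"
let nsData = NSData(bytes: bytes, length: bytes.count)
print(nsDataToStringArray(nsData: nsData))           // ["Hi", "Bye"]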

Convert hex-encoded String to String

I want to convert following hex-encoded String in Swift 3:
dcb04a9e103a5cd8b53763051cef09bc66abe029fdebae5e1d417e2ffc2a07a4
to its equivalent String:
Ü°J:\ص7cï ¼f«à)ýë®^A~/ü*¤
Following websites do the job very fine:
http://codebeautify.org/hex-string-converter
http://string-functions.com/hex-string.aspx
But I am unable to do the same in Swift 3. Following code doesn't do the job too:
func convertHexStringToNormalString(hexString: String) -> String! {
    if let data = hexString.data(using: .utf8) {
        return String.init(data: data, encoding: .utf8)
    } else {
        return nil
    }
}
Your code doesn't do what you think it does. This line:
if let data = hexString.data(using: .utf8){
means "encode these characters into UTF-8." That means that "01" doesn't encode to 0x01 (1), it encodes to 0x30 0x31 ("0" "1"). There's no "hex" in there anywhere.
This line:
return String.init(data:data, encoding: .utf8)
just takes the encoded UTF-8 data, interprets it as UTF-8, and returns it. These two methods are symmetrical, so you should expect this whole function to return whatever it was handed.
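You can see that concretely in a playground (illustration only, with Foundation imported):
let bytes = Array("01".data(using: .utf8)!)
print(bytes) // [48, 49], i.e. 0x30 0x31: the characters "0" and "1", not the byte 0x01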
Pulling together Martin and Larme's comments into one place here. This appears to be encoded in Latin-1. (This is a really awkward way to encode this data, but if it's what you're looking for, I think that's the encoding.)
import Foundation

extension Data {
    // From http://stackoverflow.com/a/40278391:
    init?(fromHexEncodedString string: String) {
        // Convert 0 ... 9, a ... f, A ... F to their decimal value,
        // return nil for all other input characters
        func decodeNibble(u: UInt16) -> UInt8? {
            switch u {
            case 0x30 ... 0x39:
                return UInt8(u - 0x30)
            case 0x41 ... 0x46:
                return UInt8(u - 0x41 + 10)
            case 0x61 ... 0x66:
                return UInt8(u - 0x61 + 10)
            default:
                return nil
            }
        }

        self.init(capacity: string.utf16.count / 2)
        var even = true
        var byte: UInt8 = 0
        for c in string.utf16 {
            guard let val = decodeNibble(u: c) else { return nil }
            if even {
                byte = val << 4
            } else {
                byte += val
                self.append(byte)
            }
            even = !even
        }
        guard even else { return nil }
    }
}
let d = Data(fromHexEncodedString: "dcb04a9e103a5cd8b53763051cef09bc66abe029fdebae5e1d417e2ffc2a07a4")!
let s = String(data: d, encoding: .isoLatin1)
You want to use the hex encoded data as an AES key, but the data is not a valid UTF-8 sequence. You could interpret it as a string in ISO Latin encoding, but the AES(key: String, ...) initializer converts the string back to its UTF-8 representation, i.e. you'll get different key data from what you started with.
Therefore you should not convert it to a string at all. Use the
extension Data {
    init?(fromHexEncodedString string: String)
}
method from hex/binary string conversion in Swift to convert the hex encoded string to Data and then pass that as an array to the AES(key: Array<UInt8>, ...) initializer:
let hexkey = "dcb04a9e103a5cd8b53763051cef09bc66abe029fdebae5e1d417e2ffc2a07a4"
let key = Array(Data(fromHexEncodedString: hexkey)!)
let encrypted = try AES(key: key, ....)
There is still a way to convert the key from hex to a readable string by adding the extension below:
extension String {
    func hexToString() -> String {
        var finalString = ""
        let chars = Array(self)
        for count in stride(from: 0, to: chars.count - 1, by: 2) {
            let firstDigit = Int("\(chars[count])", radix: 16) ?? 0
            let lastDigit = Int("\(chars[count + 1])", radix: 16) ?? 0
            let decimal = firstDigit * 16 + lastDigit
            let decimalString = String(format: "%c", decimal) as String
            finalString.append(Character(decimalString))
        }
        return finalString
    }

    func base64Decoded() -> String? {
        guard let data = Data(base64Encoded: self) else { return nil }
        return String(data: data, encoding: .init(rawValue: 0))
    }
}
Example of use:
let hexToString = secretKey.hexToString()
let base64ReadableKey = hexToString.base64Decoded() ?? ""

Convert String.CharacterView.Index to int [duplicate]

I want to convert the index of a letter contained within a string to an integer value. Attempted to read the header files but I cannot find the type for Index, although it appears to conform to protocol ForwardIndexType with methods (e.g. distanceTo).
var letters = "abcdefg"
let index = letters.characters.indexOf("c")!
// ERROR: Cannot invoke initializer for type 'Int' with an argument list of type '(String.CharacterView.Index)'
let intValue = Int(index) // I want the integer value of the index (e.g. 2)
Any help is appreciated.
edit/update:
Xcode 11 • Swift 5.1 or later
extension StringProtocol {
    func distance(of element: Element) -> Int? { firstIndex(of: element)?.distance(in: self) }
    func distance<S: StringProtocol>(of string: S) -> Int? { range(of: string)?.lowerBound.distance(in: self) }
}

extension Collection {
    func distance(to index: Index) -> Int { distance(from: startIndex, to: index) }
}

extension String.Index {
    func distance<S: StringProtocol>(in string: S) -> Int { string.distance(to: self) }
}
Playground testing
let letters = "abcdefg"
let char: Character = "c"
if let distance = letters.distance(of: char) {
print("character \(char) was found at position #\(distance)") // "character c was found at position #2\n"
} else {
print("character \(char) was not found")
}
let string = "cde"
if let distance = letters.distance(of: string) {
print("string \(string) was found at position #\(distance)") // "string cde was found at position #2\n"
} else {
print("string \(string) was not found")
}
Works for Xcode 13 and Swift 5
let myString = "Hello World"
if let i = myString.firstIndex(of: "o") {
let index: Int = myString.distance(from: myString.startIndex, to: i)
print(index) // Prints 4
}
The function func distance(from start: String.Index, to end: String.Index) -> String.IndexDistance returns an IndexDistance which is just a typealias for Int
Swift 4
var str = "abcdefg"
let index = str.index(of: "c")?.encodedOffset // Result: 2
Note: if the String contains the same character multiple times, it will just get the nearest one from the left
var str = "abcdefgc"
let index = str.index(of: "c")?.encodedOffset // Result: 2
encodedOffset has been deprecated since Swift 4.2.
Deprecation message:
encodedOffset has been deprecated as most common usage is incorrect. Use utf16Offset(in:) to achieve the same behavior.
So we can use utf16Offset(in:) like this:
var str = "abcdefgc"
let index = str.index(of: "c")?.utf16Offset(in: str) // Result: 2
When searching for index like this
⛔️ guard let index = (positions.firstIndex { position <= $0 }) else {
it is treated as Array.Index. You have to give compiler a clue you want an integer
✅ guard let index: Int = (positions.firstIndex { position <= $0 }) else {
Swift 5
You can do convert to array of characters and then use advanced(by:) to convert to integer.
let myString = "Hello World"
if let i = Array(myString).firstIndex(of: "o") {
let index: Int = i.advanced(by: 0)
print(index) // Prints 4
}
You cannot perform string operations with the traditional numeric-index approach, because String.Index is obtained from the string's indices and is not an Int. Even though a String is a collection of characters, we still can't read an element by integer index.
This is frustrating.
So, to build a new string from every even-indexed character of a string, check the code below.
let mystr = "abcdefghijklmnopqrstuvwxyz"
let mystrArray = Array(mystr)
let strLength = mystrArray.count
var resultStrArray: [Character] = []
var i = 0
while i < strLength {
    if i % 2 == 0 {
        resultStrArray.append(mystrArray[i])
    }
    i += 1
}
let resultString = String(resultStrArray)
print(resultString)
Output : acegikmoqsuwy
Here is an extension that will let you access the bounds of a substring as Ints instead of String.Index values:
import Foundation

/// This extension is available at
/// https://gist.github.com/zackdotcomputer/9d83f4d48af7127cd0bea427b4d6d61b
extension StringProtocol {
    /// Access the range of the search string as integer indices
    /// in the rendered string.
    /// - NOTE: This is "unsafe" because it may not return what you expect if
    ///         your string contains single symbols formed from multiple scalars.
    /// - Returns: A `CountableRange<Int>` that will align with the Swift String.Index
    ///            from the result of the standard function range(of:).
    func countableRange<SearchType: StringProtocol>(
        of search: SearchType,
        options: String.CompareOptions = [],
        range: Range<String.Index>? = nil,
        locale: Locale? = nil
    ) -> CountableRange<Int>? {
        guard let trueRange = self.range(of: search, options: options, range: range, locale: locale) else {
            return nil
        }
        let intStart = self.distance(from: startIndex, to: trueRange.lowerBound)
        let intEnd = self.distance(from: trueRange.lowerBound, to: trueRange.upperBound) + intStart
        return Range(uncheckedBounds: (lower: intStart, upper: intEnd))
    }
}
Just be aware that this can lead to weirdness, which is why Apple has chosen to make it hard. (Though that's a debatable design decision - hiding a dangerous thing by just making it hard...)
You can read more in the String documentation from Apple, but the tldr is that it stems from the fact that these "indices" are actually implementation-specific. They represent the indices into the string after it has been rendered by the OS, and so can shift from OS-to-OS depending on what version of the Unicode spec is being used. This means that accessing values by index is no longer a constant-time operation, because the UTF spec has to be run over the data to determine the right place in the string. These indices will also not line up with the values generated by NSString, if you bridge to it, or with the indices into the underlying UTF scalars. Caveat developer.
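A small illustration of that mismatch (the example string is made up): one Character built from two UTF-16 code units makes Character-based and UTF-16-based offsets diverge.
let s = "abc😀def"
print(s.count)       // 7 Characters
print(s.utf16.count) // 8 UTF-16 code units

if let i = s.firstIndex(of: "d") {
    print(s.distance(from: s.startIndex, to: i)) // 4 (Character offset)
    print(i.utf16Offset(in: s))                  // 5 (UTF-16 offset)
}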
In case you get an "index is out of bounds" error, you may try this approach (working in Swift 5):
extension String {
    func countIndex(_ char: Character) -> Int {
        var count = 0
        var temp = self
        for c in self {
            if c == char {
                //temp.remove(at: temp.index(temp.startIndex, offsetBy: count))
                //temp.insert(".", at: temp.index(temp.startIndex, offsetBy: count))
                return count
            }
            count += 1
        }
        return -1
    }
}

Swift - converting from UnsafePointer<UInt8> with length to String

I considered a lot of similar questions, but still can't get the compiler to accept this.
Socket Mobile API (in Objective-C) passes ISktScanDecodedData into a delegate method in Swift (the data may be binary, which I suppose is why it's not provided as string):
func onDecodedData(device: DeviceInfo?, DecodedData d: ISktScanDecodedData?) {
    let symbology: String = d!.Name()
    let rawData: UnsafePointer<UInt8> = d!.getData()
    let rawDataSize: UInt32 = d!.getDataSize()
    // want a String (UTF8 is OK) or Swifty byte array...
}
In C#, this code converts the raw data into a string:
string s = Marshal.PtrToStringAuto(d.GetData(), d.GetDataSize());
In Swift, I can get as far as UnsafeArray, but then I'm stuck:
let rawArray = UnsafeArray<UInt8>(start: rawData, length: Int(rawDataSize))
Alternatively I see String.fromCString and NSString.stringWithCharacters, but neither will accept the types of arguments at hand. If I could convert from UnsafePointer<UInt8> to UnsafePointer<()>, for example, then this would be available (though I'm not sure if it would even be safe):
NSData(bytesNoCopy: UnsafePointer<()>, length: Int, freeWhenDone: Bool)
Is there an obvious way to get a string out of all this?
This should work:
let data = NSData(bytes: rawData, length: Int(rawDataSize))
let str = String(data: data, encoding: NSUTF8StringEncoding)
Update for Swift 3:
let data = Data(bytes: rawData, count: Int(rawDataSize))
let str = String(data: data, encoding: String.Encoding.utf8)
The resulting string is nil if the data does not represent
a valid UTF-8 sequence.
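In more recent Swift versions you can also avoid the optional entirely with String(decoding:as:), which substitutes the Unicode replacement character for invalid byte sequences. A self-contained sketch (the sample bytes stand in for rawData/rawDataSize from the delegate callback):
let sample: [UInt8] = [72, 101, 108, 108, 111] // "Hello"
sample.withUnsafeBufferPointer { rawBuffer in
    // rawBuffer plays the role of (rawData, rawDataSize) from the question
    let str = String(decoding: rawBuffer, as: UTF8.self)
    print(str) // "Hello"
}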
How about this, 'pure' Swift 2.2 instead of using NSData:
public extension String {
    static func fromCString(cs: UnsafePointer<CChar>, length: Int!) -> String? {
        if length == .None { // no length given, use \0 standard variant
            return String.fromCString(cs)
        }
        let buflen = length + 1
        var buf = UnsafeMutablePointer<CChar>.alloc(buflen)
        memcpy(buf, cs, length)
        buf[length] = 0 // zero terminate
        let s = String.fromCString(buf)
        buf.dealloc(buflen)
        return s
    }
}
and Swift 3:
public extension String {
    static func fromCString(cs: UnsafePointer<CChar>, length: Int!) -> String? {
        if length == nil { // no length given, use \0 standard variant
            return String(cString: cs)
        }
        let buflen = length + 1
        let buf = UnsafeMutablePointer<CChar>.allocate(capacity: buflen)
        memcpy(buf, cs, length)
        buf[length] = 0 // zero terminate
        let s = String(cString: buf)
        buf.deallocate(capacity: buflen)
        return s
    }
}
Admittedly it's a bit stupid to alloc a buffer and copy the data just to add the zero terminator.
Obviously, as mentioned by Zaph, you need to make sure your assumptions about the string encoding are going to be right.
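A quick usage sketch of the Swift 3 extension above (the "Hello" input is made up for illustration):
"Hello".withCString { cs in
    print(String.fromCString(cs: cs, length: 5) ?? "nil") // "Hello"
}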

How do you cast a UInt64 to an Int64?

Trying to call dispatch_time in Swift is doing my head in, here's why:
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 10 * NSEC_PER_SEC), dispatch_get_main_queue(), {
    doSomething()
})
Results in the error: "Could not find an overload for '*' that accepts the supplied arguments".
NSEC_PER_SEC is a UInt64, so time for some experiments:
let x:UInt64 = 1000
let m:Int64 = 10 * x
Results in the same error as above
let x:UInt64 = 1000
let m:Int64 = 10 * (Int64) x
Results in "Consecutive statements on a line must be separated by ';'"
let x:UInt64 = 1000
let m:Int64 = 10 * ((Int64) x)
Results in "Expected ',' separator"
let x:UInt64 = 1000
let m:Int64 = (Int64)10 * (Int64) x
Results in "Consecutive statements on a line must be separated by ';'"
Etc. etc.
Damn you Swift compiler, I give up. How do I cast a UInt64 to Int64, and/or how do you use dispatch_time in swift?
You can "cast" between different integer types by initializing a new integer with the type you want:
let uint:UInt64 = 1234
let int:Int64 = Int64(uint)
It's probably not an issue in your particular case, but it's worth noting that different integer types have different ranges, and you can end up with out of range crashes if you try to convert between integers of different types:
let bigUInt:UInt64 = UInt64(Int64.max) - 1 // 9,223,372,036,854,775,806
let bigInt:Int64 = Int64(bigUInt) // no problem
let biggerUInt:UInt64 = UInt64(Int64.max) + 1 // 9,223,372,036,854,775,808
let biggerInt:Int64 = Int64(biggerUInt) // crash!
Each integer type has .max and .min static properties that you can use for checking ranges:
if (biggerUInt <= UInt64(Int64.max)) {
    let biggerInt: Int64 = Int64(biggerUInt) // safe!
}
To construct an Int64 using the bits of a UInt64, use the init seen here: https://developer.apple.com/reference/swift/int64/1538466-init
let myInt64 = Int64(bitPattern: myUInt64)
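A quick illustration of what that bit-pattern reinterpretation does (values chosen for illustration):
let u: UInt64 = UInt64.max   // 18446744073709551615, all 64 bits set
let i = Int64(bitPattern: u) // -1: same bits, read as two's complement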
Try this:
let x:UInt64 = 1000 // 1,000
let m:Int64 = 10 * Int64(x) // 10,000
or even :
let x:UInt64 = 1000 // 1,000
let m = 10 * Int64(x) // 10,000
let n = Int64(10 * x) // 10,000
let y = Int64(x) // 1,000, as Int64 (as per #Bill's question)
It's not so much casting as initialising with a separate type...
Casting a UInt64 to an Int64 is not safe since a UInt64 can have a number which is greater than Int64.max, which will result in an overflow.
Here's a snippet for converting a UInt64 to Int64 and vice-versa:
// Extension for 64-bit integer signed <-> unsigned conversion
extension Int64 {
    var unsigned: UInt64 {
        let valuePointer = UnsafeMutablePointer<Int64>.allocate(capacity: 1)
        defer {
            valuePointer.deallocate(capacity: 1)
        }
        valuePointer.pointee = self
        return valuePointer.withMemoryRebound(to: UInt64.self, capacity: 1) { $0.pointee }
    }
}

extension UInt64 {
    var signed: Int64 {
        let valuePointer = UnsafeMutablePointer<UInt64>.allocate(capacity: 1)
        defer {
            valuePointer.deallocate(capacity: 1)
        }
        valuePointer.pointee = self
        return valuePointer.withMemoryRebound(to: Int64.self, capacity: 1) { $0.pointee }
    }
}
This simply interprets the binary data of the UInt64 as an Int64, i.e. numbers greater than Int64.max will be negative because of the sign bit at the most significant bit of the 64-bit integer.
If you just want positive integers, just get the absolute value.
EDIT: Depending on behavior, you can either get the absolute value, or:
if currentValue < 0 {
    return Int64.max + currentValue + 1
} else {
    return currentValue
}
The latter option is similar to stripping the sign bit. Ex:
// Using an 8-bit integer for simplicity
// currentValue
0b1111_1111 // If this is interpreted as Int8, this is -1.
// Strip sign bit
0b0111_1111 // As Int8, this is 127. To get this we can add Int8.max
// Int8.max + currentValue + 1
127 + (-1) + 1 = 127
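The same idea as a small Swift sketch (8-bit for readability; the values are illustrative):
let raw: UInt8 = 0b1111_1111
let asSigned = Int8(bitPattern: raw) // -1
// For negative values, adding Int8.max + 1 is equivalent to clearing the sign bit
let stripped = asSigned < 0 ? Int8.max + asSigned + 1 : asSigned
print(stripped) // 127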
Better solution for converting:
UInt64 Int64_2_UInt64(Int64 Value)
{
    return (((UInt64)((UInt32)((UInt64)Value >> 32))) << 32)
         | (UInt64)((UInt32)((UInt64)Value & 0x0ffffffff));
}

Int64 UInt64_2_Int64(UInt64 Value)
{
    return (Int64)((((Int64)(UInt32)((UInt64)Value >> 32)) << 32)
         | (Int64)((UInt32)((UInt64)Value & 0x0ffffffff)));
}
A simple solution for Swift 3 is the built-in numericCast function, which traps at runtime if the value does not fit in the destination type:
var a: UInt64 = 1234567890
var b: Int64 = numericCast(a)
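A small usage sketch of numericCast (values are illustrative); the commented-out lines show a value that would not fit:
let small: UInt64 = 42
let ok: Int64 = numericCast(small) // 42

// let tooBig: UInt64 = UInt64(Int64.max) + 1
// let bad: Int64 = numericCast(tooBig) // would trap at runtime: value does not fit in Int64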