Decode nsData to String Array - swift

I want to decode my nsData to a String Array. I have this code right now:
// Question code: walks the NSData once recording the offset of every 0x00
// terminator, then slices the data between terminators and UTF-8 decodes
// each slice.
// NOTE(review): kept verbatim — the `UnsafePointer<UInt8>(buffer)` line is
// the Swift 3 compile error being asked about ("Cannot invoke initializer
// for type 'UnsafePointer' with an argument list of type
// '(UnsafeRawPointer)'"). The force-unwrapped NSString decode below would
// also crash on a slice that is not valid UTF-8.
func nsDataToStringArray(data: NSData) -> [String] {
var decodedStrings = [String]()
var stringTerminatorPositions = [Int]()
// Absolute offset across all chunks of the (possibly discontiguous) data.
var currentPosition = 0
data.enumerateBytes() {
buffer, range, stop in
let bytes = UnsafePointer<UInt8>(buffer)
for i in 0 ..< range.length {
if bytes[i] == 0 {
stringTerminatorPositions.append(currentPosition)
}
currentPosition += 1
}
}
var stringStartPosition = 0
for stringTerminatorPosition in stringTerminatorPositions {
// The slice excludes the terminator byte itself.
let encodedString = data.subdata(with: NSMakeRange(stringStartPosition, stringTerminatorPosition - stringStartPosition))
let decodedString = NSString(data: encodedString, encoding: String.Encoding.utf8.rawValue)! as String
decodedStrings.append(decodedString)
stringStartPosition = stringTerminatorPosition + 1
}
return decodedStrings
}
But I get an error on this line: let bytes = UnsafePointer<UInt8>(buffer)
Cannot invoke initializer for type 'UnsafePointer' with an
argument list of type '(UnsafeRawPointer)'
Do I need to convert the buffer to a UnsafePointer? If so, how can I do that?

buffer in the enumerateBytes() closure is a UnsafeRawPointer
and you have to "rebind" it to an UInt8 pointer in Swift 3:
// let bytes = UnsafePointer<UInt8>(buffer)
let bytes = buffer.assumingMemoryBound(to: UInt8.self)
But why so complicated? You can achieve the same result with
// Splits the data at every 0 byte and decodes each chunk as UTF-8.
// Chunks that are not valid UTF-8 are silently dropped (the String
// initializer is failable).
func nsDataToStringArray(nsData: NSData) -> [String] {
    let data = nsData as Data
    // compactMap replaces the optional-flattening flatMap overload that
    // was deprecated in Swift 4.1 (SE-0187); behavior is identical.
    return data.split(separator: 0).compactMap { String(bytes: $0, encoding: .utf8) }
}
How does this work?
Data is a Sequence of UInt8, therefore
split(separator: 0) can be called on it, returning an array of
"data slices" (which are views into the source data, not copies).
Each "data slice" is again a Sequence of UInt8, from which a
String can be created with String(bytes: $0, encoding: .utf8).
This is a failable initializer (because the data may be invalid UTF-8).
flatMap { ... } (renamed to compactMap { ... } in Swift 4.1) returns an array with all non-nil results,
i.e. an array with all strings which could be created from
valid UTF-8 code sequences between zero bytes.

Related

How to convert UInt16 to UInt8 in Swift 3?

I want to convert UInt16 to UInt8 array, but am getting the following error message:
'init' is unavailable: use 'withMemoryRebound(to:capacity:_)' to
temporarily view memory as another layout-compatible type.
The code:
let statusByte: UInt8 = UInt8(status)
let lenghtByte: UInt16 = UInt16(passwordBytes.count)
var bigEndian = lenghtByte.bigEndian
let bytePtr = withUnsafePointer(to: &bigEndian) {
UnsafeBufferPointer<UInt8>(start: UnsafePointer($0), count: MemoryLayout.size(ofValue: bigEndian))
}
As the error message indicates, you have to use withMemoryRebound()
to reinterpret the pointer to UInt16 as a pointer to UInt8:
let bytes = withUnsafePointer(to: &bigEndian) {
$0.withMemoryRebound(to: UInt8.self, capacity: MemoryLayout.size(ofValue: bigEndian)) {
Array(UnsafeBufferPointer(start: $0, count: MemoryLayout.size(ofValue: bigEndian)))
}
}
The closures are invoked with pointers ($0) which are only valid
for the lifetime of the closure and must not be passed to the outside
for later use. That's why an Array is created and used as return value.
There is a simpler solution however:
let bytes = withUnsafeBytes(of: &bigEndian) { Array($0) }
Explanation: withUnsafeBytes invokes the closure with a UnsafeRawBufferPointer to the storage of the bigEndian variable.
Since UnsafeRawBufferPointer is a Sequence of UInt8, an array
can be created from that with Array($0).
You can extend Numeric protocol and create a data property as follow:
Swift 4 or later
// Exposes the raw in-memory bytes (native byte order) of any numeric
// value as Data. Swift 4 or later.
extension Numeric {
    var data: Data {
        var value = self
        // withUnsafeBytes(of:) yields a raw buffer over `value`'s storage;
        // Data copies those bytes before the closure returns.
        return withUnsafeBytes(of: &value) { Data($0) }
    }
}
Since Swift 3 Data conforms to RandomAccessCollection so you can just create an array of bytes from your UInt16 bigEndian data:
// Copies the data's bytes into a fresh [UInt8] (Data is a
// RandomAccessCollection of UInt8 since Swift 3).
extension Data {
    var array: [UInt8] { return [UInt8](self) }
}
let lenghtByte = UInt16(8)
let bytePtr = lenghtByte.bigEndian.data.array // [0, 8]

Convert hex-encoded String to String

I want to convert following hex-encoded String in Swift 3:
dcb04a9e103a5cd8b53763051cef09bc66abe029fdebae5e1d417e2ffc2a07a4
to its equivalent String:
Ü°J:\ص7cï ¼f«à)ýë®^A~/ü*¤
Following websites do the job very fine:
http://codebeautify.org/hex-string-converter
http://string-functions.com/hex-string.aspx
But I am unable to do the same in Swift 3. Following code doesn't do the job too:
// Question code: this does NOT decode hex. data(using: .utf8) encodes the
// characters of the string ("0" becomes byte 0x30), and String(data:encoding:)
// simply reverses that, so the function returns its input unchanged —
// which is exactly what the answer below explains.
// NOTE(review): the implicitly-unwrapped `String!` return is unnecessary;
// `String?` would be the conventional failable return type.
func convertHexStringToNormalString(hexString:String)->String!{
if let data = hexString.data(using: .utf8){
return String.init(data:data, encoding: .utf8)
}else{ return nil}
}
Your code doesn't do what you think it does. This line:
if let data = hexString.data(using: .utf8){
means "encode these characters into UTF-8." That means that "01" doesn't encode to 0x01 (1), it encodes to 0x30 0x31 ("0" "1"). There's no "hex" in there anywhere.
This line:
return String.init(data:data, encoding: .utf8)
just takes the encoded UTF-8 data, interprets it as UTF-8, and returns it. These two methods are symmetrical, so you should expect this whole function to return whatever it was handed.
Pulling together Martin and Larme's comments into one place here. This appears to be encoded in Latin-1. (This is a really awkward way to encode this data, but if it's what you're looking for, I think that's the encoding.)
import Foundation
// Failable construction of Data from a hex string such as "dcb04a9e…".
// Returns nil if the string contains any non-hex character or an odd
// number of hex digits.
extension Data {
// From http://stackoverflow.com/a/40278391:
init?(fromHexEncodedString string: String) {
// Convert 0 ... 9, a ... f, A ...F to their decimal value,
// return nil for all other input characters
func decodeNibble(u: UInt16) -> UInt8? {
switch(u) {
case 0x30 ... 0x39:
return UInt8(u - 0x30)
case 0x41 ... 0x46:
return UInt8(u - 0x41 + 10)
case 0x61 ... 0x66:
return UInt8(u - 0x61 + 10)
default:
return nil
}
}
// One output byte per pair of input digits.
self.init(capacity: string.utf16.count/2)
// `even` is true while expecting the high nibble of the next byte.
var even = true
var byte: UInt8 = 0
// Iterate UTF-16 units; every valid hex digit is a single unit, and any
// surrogate or other character falls through decodeNibble to nil.
for c in string.utf16 {
guard let val = decodeNibble(u: c) else { return nil }
if even {
byte = val << 4
} else {
byte += val
self.append(byte)
}
even = !even
}
// An odd number of digits would leave a half-finished byte — reject it.
guard even else { return nil }
}
}
let d = Data(fromHexEncodedString: "dcb04a9e103a5cd8b53763051cef09bc66abe029fdebae5e1d417e2ffc2a07a4")!
let s = String(data: d, encoding: .isoLatin1)
You want to use the hex encoded data as an AES key, but the
data is not a valid UTF-8 sequence. You could interpret
it as a string in ISO Latin encoding, but the AES(key: String, ...)
initializer converts the string back to its UTF-8 representation,
i.e. you'll get different key data from what you started with.
Therefore you should not convert it to a string at all. Use the
extension Data {
init?(fromHexEncodedString string: String)
}
method from hex/binary string conversion in Swift
to convert the hex encoded string to Data and then pass that
as an array to the AES(key: Array<UInt8>, ...) initializer:
let hexkey = "dcb04a9e103a5cd8b53763051cef09bc66abe029fdebae5e1d417e2ffc2a07a4"
let key = Array(Data(fromHexEncodedString: hexkey)!)
let encrypted = try AES(key: key, ....)
There is still a way to convert the key from hex to readable string by adding the below extension:
// NOTE(review): this answer's extension builds characters from hex pairs via
// String(format: "%c", ...). That is only predictable for ASCII values
// (0...127); for higher byte values the result is format-dependent, not a
// proper Latin-1 or UTF-8 decode. Prefer the Data(fromHexEncodedString:)
// approach for real decoding.
extension String {
// Converts pairs of hex digits in self into characters, one per pair.
func hexToString()->String{
var finalString = ""
let chars = Array(self)
// Walk the digits two at a time; a trailing odd digit is ignored.
for count in stride(from: 0, to: chars.count - 1, by: 2){
// `?? 0` silently maps any non-hex character to zero instead of failing.
let firstDigit = Int.init("\(chars[count])", radix: 16) ?? 0
let lastDigit = Int.init("\(chars[count + 1])", radix: 16) ?? 0
let decimal = firstDigit * 16 + lastDigit
let decimalString = String(format: "%c", decimal) as String
finalString.append(Character.init(decimalString))
}
return finalString
}
// Base64-decodes self into a String.
func base64Decoded() -> String? {
guard let data = Data(base64Encoded: self) else { return nil }
// NOTE(review): .init(rawValue: 0) is not a documented String.Encoding
// constant — confirm the intended encoding (likely .utf8 or .isoLatin1).
return String(data: data, encoding: .init(rawValue: 0))
}
}
Example of use:
let hexToString = secretKey.hexToString()
let base64ReadableKey = hexToString.base64Decoded() ?? ""

Why is my variable returning a nil after being converted to an int?

I'm taking data from an api and storing it in a string variable. When I print the variable it returns what I'm looking for but when I try to convert it to an int using the .toInt method it returns a nil?
// Question code (Swift 1.x APIs: NSURLSession, println, String.toInt).
// Fetches London weather XML and scrapes the temperature out of the raw
// response text with string splitting.
// The asked-about nil: toInt() parses only whole-number strings, and
// "272.32" contains a decimal point — the answers below explain this.
func getWeather() {
let url = NSURL(string: "http://api.openweathermap.org/data/2.5/weather?q=London&mode=xml")
let task = NSURLSession.sharedSession().dataTaskWithURL(url!) {
(data, response, error) in
if error == nil {
var urlContent = NSString(data: data, encoding: NSUTF8StringEncoding) as NSString!
// Everything after `temperature value="` starts at element [1].
var urlContentArray = urlContent.componentsSeparatedByString("temperature value=\"")
// NOTE(review): hard-coded length 6 assumes the temperature is always
// exactly six characters (e.g. 272.32).
var temperatureString = (urlContentArray[1].substringWithRange(NSRange(location: 0, length:6))) as String
println(temperatureString) // returns 272.32
var final = temperatureString.toInt()
println(final) //returns nil
println(temperatureString.toInt())
// NOTE(review): UI update from a background session callback — should be
// dispatched to the main thread.
self.temperature.text = "\(temperatureString)"
}
}
task.resume()
}
Even simpler, though slightly a trick, you can use integerValue:
temperatureString.integerValue
Unlike toInt, integerValue will stop converting when it finds a non-digit (it also throws away leading spaces).
If temperatureString is a String (rather than an NSString), you'll need to push it over:
(temperatureString as NSString).integerValue
That's because 272.32 is not an integer.
You could convert it to a Float.
272.32 isn't an Integer. If your temperature string is an NSString, you can do this to convert it:
Int("272.32".floatValue)

Swift - converting from UnsafePointer<UInt8> with length to String

I considered a lot of similar questions, but still can't get the compiler to accept this.
Socket Mobile API (in Objective-C) passes ISktScanDecodedData into a delegate method in Swift (the data may be binary, which I suppose is why it's not provided as string):
// Question code: Socket Mobile delegate callback receiving scanned data as a
// raw pointer + length (binary-safe, hence not a String).
func onDecodedData(device: DeviceInfo?, DecodedData d: ISktScanDecodedData?) {
let symbology: String = d!.Name()
let rawData: UnsafePointer<UInt8> = d!.getData()
// NOTE(review): `decoded` is not declared in this snippet — presumably it
// should be `d!.getDataSize()` to match the parameter above.
let rawDataSize: UInt32 = decoded!.getDataSize()
// want a String (UTF8 is OK) or Swifty byte array...
}
In C#, this code converts the raw data into a string:
string s = Marshal.PtrToStringAuto(d.GetData(), d.GetDataSize());
In Swift, I can get as far as UnsafeArray, but then I'm stuck:
let rawArray = UnsafeArray<UInt8>(start: rawData, length: Int(rawDataSize))
Alternatively I see String.fromCString and NSString.stringWithCharacters, but neither will accept the types of arguments at hand. If I could convert from UnsafePointer<UInt8> to UnsafePointer<()>, for example, then this would be available (though I'm not sure if it would even be safe):
NSData(bytesNoCopy: UnsafePointer<()>, length: Int, freeWhenDone: Bool)
Is there an obvious way to get a string out of all this?
This should work:
let data = NSData(bytes: rawData, length: Int(rawDataSize))
let str = String(data: data, encoding: NSUTF8StringEncoding)
Update for Swift 3:
let data = Data(bytes: rawData, count: Int(rawDataSize))
let str = String(data: data, encoding: String.Encoding.utf8)
The resulting string is nil if the data does not represent
a valid UTF-8 sequence.
How about this, 'pure' Swift 2.2 instead of using NSData:
// Swift 2.2: creates a String from `length` bytes at `cs` (the bytes need
// not be zero-terminated); returns nil for invalid UTF-8. Passing a nil
// length falls back to the standard zero-terminated variant.
public extension String {
    static func fromCString
        (cs: UnsafePointer<CChar>, length: Int!) -> String?
    {
        if length == .None { // no length given, use \0 standard variant
            return String.fromCString(cs)
        }
        // Copy into a scratch buffer one byte longer so it can be
        // zero-terminated for the stock fromCString.
        let buflen = length + 1
        var buf = UnsafeMutablePointer<CChar>.alloc(buflen)
        memcpy(buf, cs, length) // fixed: original had a stray `)` here
        buf[length] = 0 // zero terminate
        let s = String.fromCString(buf)
        buf.dealloc(buflen)
        return s
    }
}
and Swift 3:
// Swift 3 variant: creates a String from `length` bytes at `cs` (the bytes
// need not be zero-terminated); falls back to the standard zero-terminated
// initializer when no length is given.
// NOTE(review): String(cString:) replaces invalid UTF-8 with U+FFFD rather
// than returning nil, so the Optional return never actually fails here.
public extension String {
static func fromCString
(cs: UnsafePointer<CChar>, length: Int!) -> String?
{
if length == nil { // no length given, use \0 standard variant
return String(cString: cs)
}
// Copy into a scratch buffer one byte longer so it can be zero-terminated.
let buflen = length + 1
let buf = UnsafeMutablePointer<CChar>.allocate(capacity: buflen)
memcpy(buf, cs, length)
buf[length] = 0 // zero terminate
let s = String(cString: buf)
// NOTE(review): deallocate(capacity:) was deprecated and later removed —
// on Swift 4.1+ use buf.deallocate().
buf.deallocate(capacity: buflen)
return s
}
}
Admittedly it's a bit stupid to alloc a buffer and copy the data just to add the zero terminator.
Obviously, as mentioned by Zaph, you need to make sure your assumptions about the string encoding are going to be right.

How can I create a String from UTF8 in Swift?

We know we can print each character in UTF8 code units?
Then, if we have code units of these characters, how can we create a String with them?
With Swift 5, you can choose one of the following ways in order to convert a collection of UTF-8 code units into a string.
#1. Using String's init(_:) initializer
If you have a String.UTF8View instance (i.e. a collection of UTF-8 code units) and want to convert it to a string, you can use init(_:) initializer. init(_:) has the following declaration:
init(_ utf8: String.UTF8View)
Creates a string corresponding to the given sequence of UTF-8 code units.
The Playground sample code below shows how to use init(_:):
let string = "Café 🇫🇷"
let utf8View: String.UTF8View = string.utf8
let newString = String(utf8View)
print(newString) // prints: Café 🇫🇷
#2. Using Swift's init(decoding:as:) initializer
init(decoding:as:) creates a string from the given Unicode code units collection in the specified encoding:
let string = "Café 🇫🇷"
let codeUnits: [Unicode.UTF8.CodeUnit] = Array(string.utf8)
let newString = String(decoding: codeUnits, as: UTF8.self)
print(newString) // prints: Café 🇫🇷
Note that init(decoding:as:) also works with String.UTF8View parameter:
let string = "Café 🇫🇷"
let utf8View: String.UTF8View = string.utf8
let newString = String(decoding: utf8View, as: UTF8.self)
print(newString) // prints: Café 🇫🇷
#3. Using transcode(_:from:to:stoppingOnError:into:) function
The following example transcodes the UTF-8 representation of an initial string into Unicode scalar values (UTF-32 code units) that can be used to build a new string:
let string = "Café 🇫🇷"
let bytes = Array(string.utf8)
var newString = ""
_ = transcode(bytes.makeIterator(), from: UTF8.self, to: UTF32.self, stoppingOnError: true, into: {
newString.append(String(Unicode.Scalar($0)!))
})
print(newString) // prints: Café 🇫🇷
#4. Using Array's withUnsafeBufferPointer(_:) method and String's init(cString:) initializer
init(cString:) has the following declaration:
init(cString: UnsafePointer<CChar>)
Creates a new string by copying the null-terminated UTF-8 data referenced by the given pointer.
The following example shows how to use init(cString:) with a pointer to the content of a CChar array (i.e. a well-formed UTF-8 code unit sequence) in order to create a string from it:
let bytes: [CChar] = [67, 97, 102, -61, -87, 32, -16, -97, -121, -85, -16, -97, -121, -73, 0]
let newString = bytes.withUnsafeBufferPointer({ (bufferPointer: UnsafeBufferPointer<CChar>)in
return String(cString: bufferPointer.baseAddress!)
})
print(newString) // prints: Café 🇫🇷
#5. Using Unicode.UTF8's decode(_:) method
To decode a code unit sequence, call decode(_:) repeatedly until it returns UnicodeDecodingResult.emptyInput:
let string = "Café 🇫🇷"
let codeUnits = Array(string.utf8)
var codeUnitIterator = codeUnits.makeIterator()
var utf8Decoder = Unicode.UTF8()
var newString = ""
Decode: while true {
switch utf8Decoder.decode(&codeUnitIterator) {
case .scalarValue(let value):
newString.append(Character(Unicode.Scalar(value)))
case .emptyInput:
break Decode
case .error:
print("Decoding error")
break Decode
}
}
print(newString) // prints: Café 🇫🇷
#6. Using String's init(bytes:encoding:) initializer
Foundation gives String a init(bytes:encoding:) initializer that you can use as indicated in the Playground sample code below:
import Foundation
let string = "Café 🇫🇷"
let bytes: [Unicode.UTF8.CodeUnit] = Array(string.utf8)
let newString = String(bytes: bytes, encoding: String.Encoding.utf8)
print(String(describing: newString)) // prints: Optional("Café 🇫🇷")
It's possible to convert UTF8 code points to a Swift String idiomatically using the UTF8 Swift class. Although it's much easier to convert from String to UTF8!
import Foundation
// Swift 1.x-era helper (generate(), .Result/.EmptyInput/.Error cases and
// do { } while loop syntax) — this does NOT compile under Swift 3+; a
// Swift 3 port appears later in this document.
public class UTF8Encoding {
// Decodes a UTF-8 byte array into a String, stopping silently at the
// first malformed sequence and returning what was decoded so far.
public static func encode(bytes: Array<UInt8>) -> String {
var encodedString = ""
var decoder = UTF8()
var generator = bytes.generate()
var finished: Bool = false
do {
let decodingResult = decoder.decode(&generator)
switch decodingResult {
case .Result(let char):
encodedString.append(char)
case .EmptyInput:
finished = true
/* ignore errors and unexpected values */
case .Error:
finished = true
default:
finished = true
}
} while (!finished)
return encodedString
}
// Returns the UTF-8 code units of a String as a byte array.
public static func decode(str: String) -> Array<UInt8> {
var decodedBytes = Array<UInt8>()
for b in str.utf8 {
decodedBytes.append(b)
}
return decodedBytes
}
}
// Round-trip test: String -> UTF-8 bytes -> String must be lossless.
// NOTE(review): uses XCTAssert, so it assumes an XCTest target even though
// the function is free-standing here.
func testUTF8Encoding() {
let testString = "A UTF8 String With Special Characters: 😀🍎"
let decodedArray = UTF8Encoding.decode(testString)
let encodedString = UTF8Encoding.encode(decodedArray)
XCTAssert(encodedString == testString, "UTF8Encoding is lossless: \(encodedString) != \(testString)")
}
Of the other alternatives suggested:
Using NSString invokes the Objective-C bridge;
Using UnicodeScalar is error-prone because it converts UnicodeScalars directly to Characters, ignoring complex grapheme clusters; and
Using String.fromCString is potentially unsafe as it uses pointers.
improve on Martin R's answer
import AppKit
let utf8 : CChar[] = [65, 66, 67, 0]
let str = NSString(bytes: utf8, length: utf8.count, encoding: NSUTF8StringEncoding)
println(str) // Output: ABC
import AppKit
let utf8 : UInt8[] = [0xE2, 0x82, 0xAC, 0]
let str = NSString(bytes: utf8, length: utf8.count, encoding: NSUTF8StringEncoding)
println(str) // Output: €
What happens is that an Array can be automatically converted to CConstVoidPointer, which can be used to create a string with NSString(bytes: CConstVoidPointer, length len: Int, encoding: UInt)
Swift 3
let s = String(bytes: arr, encoding: .utf8)
I've been looking for a comprehensive answer regarding string manipulation in Swift myself. Relying on cast to and from NSString and other unsafe pointer magic just wasn't doing it for me. Here's a safe alternative:
First, we'll want to extend UInt8. This is the primitive type behind CodeUnit.
// Maps a single byte to the Character with that Unicode scalar value —
// i.e. Latin-1 / ISO 8859-1 semantics, NOT UTF-8 decoding. Feeding the
// bytes of a multi-byte UTF-8 sequence through this one at a time produces
// mojibake; it is only faithful for ASCII (0...127), as in the examples below.
extension UInt8 {
var character: Character {
return Character(UnicodeScalar(self))
}
}
This will allow us to do something like this:
let codeUnits: [UInt8] = [
72, 69, 76, 76, 79
]
let characters = codeUnits.map { $0.character }
let string = String(characters)
// string prints "HELLO"
Equipped with this extension, we can now being modifying strings.
let string = "ABCDEFGHIJKLMONP"
var modifiedCharacters = [Character]()
for (index, utf8unit) in string.utf8.enumerate() {
// Insert a "-" every 4 characters
if index > 0 && index % 4 == 0 {
let separator: UInt8 = 45 // "-" in ASCII
modifiedCharacters.append(separator.character)
}
modifiedCharacters.append(utf8unit.character)
}
let modifiedString = String(modifiedCharacters)
// modified string == "ABCD-EFGH-IJKL-MONP"
// Swift4
var units = [UTF8.CodeUnit]()
//
// update units
//
let str = String(decoding: units, as: UTF8.self)
I would do something like this; it may not be as elegant as working with 'pointers', but it does the job well. It is essentially a bunch of new += operators for String, like:
// NOTE(review): pre-Swift-1.0 syntax (`#infix`, leading `inout`) — this does
// not compile on any released Swift. Each overload appends one Character
// built from 1, 2 or 4 big-endian units packed into a UInt32 scalar value;
// despite the surrounding prose, this treats the tuple as a raw code-point
// value, not as a UTF-8/16 code-unit sequence to be decoded.
#infix func += (inout lhs: String, rhs: (unit1: UInt8)) {
lhs += Character(UnicodeScalar(UInt32(rhs.unit1)))
}
#infix func += (inout lhs: String, rhs: (unit1: UInt8, unit2: UInt8)) {
lhs += Character(UnicodeScalar(UInt32(rhs.unit1) << 8 | UInt32(rhs.unit2)))
}
#infix func += (inout lhs: String, rhs: (unit1: UInt8, unit2: UInt8, unit3: UInt8, unit4: UInt8)) {
lhs += Character(UnicodeScalar(UInt32(rhs.unit1) << 24 | UInt32(rhs.unit2) << 16 | UInt32(rhs.unit3) << 8 | UInt32(rhs.unit4)))
}
NOTE: you can extend the list of the supported operators with overriding + operator as well, defining a list of the fully commutative operators for String.
and now you are able to append a String with a unicode (UTF-8, UTF-16 or UTF-32) character like e.g.:
var string: String = "signs of the Zodiac: "
string += (0x0, 0x0, 0x26, 0x4b)
string += (38)
string += (0x26, 76)
This is a possible solution (now updated for Swift 2):
// Swift 2 example: decode a zero-terminated [CChar] without copying, by
// handing the array's contiguous storage straight to String.fromCString.
let utf8 : [CChar] = [65, 66, 67, 0]
if let str = utf8.withUnsafeBufferPointer( { String.fromCString($0.baseAddress) }) {
print(str) // Output: ABC
} else {
print("Not a valid UTF-8 string")
}
Within the closure, $0 is a UnsafeBufferPointer<CChar> pointing to the array's contiguous storage. From that a Swift String can be created.
Alternatively, if you prefer the input as unsigned bytes:
// Same idea with unsigned bytes: UnsafePointer($0.baseAddress) reinterprets
// the UInt8 pointer as the CChar pointer fromCString expects (Swift 2).
let utf8 : [UInt8] = [0xE2, 0x82, 0xAC, 0]
if let str = utf8.withUnsafeBufferPointer( { String.fromCString(UnsafePointer($0.baseAddress)) }) {
print(str) // Output: €
} else {
print("Not a valid UTF-8 string")
}
If you're starting with a raw buffer, such as from the Data object returned from a file handle (in this case, taken from a Pipe object):
// Reads everything the subprocess wrote to the pipe and decodes it as UTF-8
// via a manual buffer copy.
let data = pipe.fileHandleForReading.readDataToEndOfFile()
// String(cString:) requires a NUL-terminated buffer, so allocate one extra
// byte and terminate explicitly — the original read past the end of an
// unterminated buffer and also never deallocated it.
let unsafePointer = UnsafeMutablePointer<UInt8>.allocate(capacity: data.count + 1)
defer { unsafePointer.deallocate() }
data.copyBytes(to: unsafePointer, count: data.count)
unsafePointer[data.count] = 0
let output = String(cString: unsafePointer)
Here is a Swift 3.0 version of Martin R's answer:
public class UTF8Encoding {
    /// Decodes a UTF-8 byte array into a String. Decoding stops silently at
    /// the first malformed sequence (or at end of input) and returns
    /// whatever was decoded up to that point.
    public static func encode(bytes: Array<UInt8>) -> String {
        var result = ""
        var codec = UTF8()
        var iterator = bytes.makeIterator()
        decoding: while true {
            switch codec.decode(&iterator) {
            case .scalarValue(let scalar):
                // Interpolating the scalar appends its character form.
                result += "\(scalar)"
            case .emptyInput, .error:
                break decoding
            }
        }
        return result
    }
    /// Returns the UTF-8 code units of a String as a byte array.
    public static func decode(str: String) -> Array<UInt8> {
        return Array(str.utf8)
    }
}
If you want to show emoji from a UTF-8 string, just use the convertEmojiCodesToString method below. It works properly for strings like "U+1F52B" (emoji) or "U+1F1E6 U+1F1F1" (country flag emoji)
// Converts a space-separated list of "U+XXXX" code-point strings into the
// corresponding characters (e.g. "U+1F1E6 U+1F1F1" -> a flag emoji).
class EmojiConverter {
static func convertEmojiCodesToString(_ emojiCodesString: String) -> String {
let emojies = emojiCodesString.components(separatedBy: " ")
var resultString = ""
for emoji in emojies {
var formattedCode = emoji
// NOTE(review): `slice(from:to:)` and `length` are not standard String
// API — they must come from project extensions not shown here; presumably
// this strips the leading "U+" prefix. Verify against those helpers.
formattedCode.slice(from: 2, to: emoji.length)
formattedCode = formattedCode.lowercased()
// Parse the remaining hex digits as a scalar value; entries that are not
// valid hex or not a valid Unicode scalar are silently skipped.
if let charCode = UInt32(formattedCode, radix: 16),
let unicode = UnicodeScalar(charCode) {
let str = String(unicode)
resultString += "\(str)"
}
}
return resultString
}
}