Swift Hex String Decoding

I'm going insane trying to get this to work. I'm trying to duplicate the following Java function:
https://commons.apache.org/proper/commons-codec/apidocs/org/apache/commons/codec/binary/Hex.html#decodeHex-java.lang.String-
Here is the Java code:
public static byte[] decodeHex(final String data) throws DecoderException {
    return decodeHex(data.toCharArray());
}

public static byte[] decodeHex(final char[] data) throws DecoderException {
    final int len = data.length;
    if ((len & 0x01) != 0) {
        throw new DecoderException("Odd number of characters.");
    }
    final byte[] out = new byte[len >> 1];
    // two characters form the hex value.
    for (int i = 0, j = 0; j < len; i++) {
        int f = toDigit(data[j], j) << 4;
        j++;
        f = f | toDigit(data[j], j);
        j++;
        out[i] = (byte) (f & 0xFF);
    }
    return out;
}

protected static int toDigit(final char ch, final int index) throws DecoderException {
    final int digit = Character.digit(ch, 16);
    if (digit == -1) {
        throw new DecoderException("Illegal hexadecimal character " + ch + " at index " + index);
    }
    return digit;
}
Given the string 01e703000000000000, the byte array should contain
[1, -25, 3, 0, 0, 0, 0, 0, 0]
I thought this might be standard hex decoding, so I used the decoding function I've been using:
extension String {
    /// A data representation of the hexadecimal bytes in this string.
    var hexDecodedData: Data {
        // Get the UTF8 characters of this string
        let chars = Array(utf8)
        // Keep the bytes in an UInt8 array and later convert it to Data
        var bytes = [UInt8]()
        bytes.reserveCapacity(count / 2)
        // It is a lot faster to use a lookup map instead of strtoul
        let map: [UInt8] = [
            0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, // 01234567
            0x08, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 89:;<=>?
            0x00, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x00, // @ABCDEFG
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00  // HIJKLMNO
        ]
        // Grab two characters at a time, map them and turn it into a byte
        for i in stride(from: 0, to: count, by: 2) {
            let index1 = Int(chars[i] & 0x1F ^ 0x10)
            let index2 = Int(chars[i + 1] & 0x1F ^ 0x10)
            bytes.append(map[index1] << 4 | map[index2])
        }
        return Data(bytes)
    }
}
This results in
[1, 231, 3, 0, 0, 0, 0, 0, 0]
So then I tried converting the Java code to Swift myself:
extension String {
    // public static byte[] decodeHex(final char[] data) throws DecoderException
    func decodeHex() throws -> [Int] {
        let stringArray = Array(self)
        let len = count
        var out: [Int?] = Array(repeating: nil, count: len >> 1)
        if (len & 0x01) != 0 {
            throw HexDecodingError.oddNumberOfCharacters
        }
        var i = 0
        var j = 0
        while j < len {
            var f: Int = try Self.toDigit(char: String(stringArray[j]), index: j)
            j += 1
            f = f | (try Self.toDigit(char: String(stringArray[j]), index: j))
            j += 1
            out[i] = f & 0xFF
            i += 1
        }
        return out.compactMap { $0 }
    }

    enum HexDecodingError: Error {
        case oddNumberOfCharacters
        case illegalCharacter(String)
        case conversionToDigitFailed(String, Int)
    }

    static func toDigit(char: String, index: Int) throws -> Int {
        let digit = Int(char, radix: 16)
        if digit == -1 {
            throw HexDecodingError.illegalCharacter(char)
        }
        guard let digit = digit else {
            throw HexDecodingError.conversionToDigitFailed(char, index)
        }
        return digit
    }
}
Which results in
[1, 15, 3, 0, 0, 0, 0, 0, 0]
What is going on? What am I doing wrong?
EDIT:
Also, how can there possibly be a negative number in there, since a byte array is represented as a [UInt8]?

The Java byte primitive type is an 8-bit signed two's-complement integer. It has a minimum value of -128 and a maximum value of 127 (inclusive). (Java, in general, does not have unsigned primitive types, only signed ones.)
In your Java output, the -25 value corresponds to hex E7 in your string, whose decimal value is E * 16 + 7 = 14 * 16 + 7 = 231; 231 is outside of the [-128, 127] range, and wraps around to -25. (More precisely the bit pattern of unsigned 8-bit 231 corresponds to the bit pattern of signed 8-bit -25 in two's-complement.)
In Swift, you're using a UInt8 to represent results (both explicitly, and implicitly in a Data value), and the range of UInt8 is [0, 255]; 231 fits within this range, and is what you see in your first Swift code snippet. The results are bitwise equivalent, but if you need results which are type equivalent to what you're seeing in Java, you'll need to work in terms of Int8 instead of UInt8.
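(Incidentally, the reason your own translation prints [1, 15, 3, ...] is that it drops the << 4 from the Java original, int f = toDigit(data[j], j) << 4, so for e7 it computes 14 | 7 = 15 instead of (14 << 4) | 7 = 231.)
If you do want Java's signed values, here is a minimal sketch of that reinterpretation, using Int8(bitPattern:) on the decoded bytes:

let decoded: [UInt8] = [1, 231, 3, 0, 0, 0, 0, 0, 0]

// Int8(bitPattern:) keeps the bit pattern and changes only the
// interpretation, so 231 (0xE7) becomes -25, matching Java's output.
let signed = decoded.map { Int8(bitPattern: $0) }
print(signed) // [1, -25, 3, 0, 0, 0, 0, 0, 0]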

Related

CheckSum8 Modulo 256 Swift

I have an array of UInt8 and I want to calculate CheckSum8 Modulo 256.
If the sum of the bytes is less than 255, the checkSum function returns the correct value, e.g.
let bytes1: [UInt8] = [1, 0xa1]
let validCheck = checkSum(data: bytes1) // 162 = 0xa2
let bytes: [UInt8] = [6, 0xB1, 27, 0xc5, 0xf5, 0x9d]
let invalidCheck = checkSum(data: bytes) // 41
The function below returns 41, but the expected checksum is 35.
func checkSum(data: [UInt8]) -> UInt8 {
    var sum = 0
    for i in 0..<data.count {
        sum += Int(data[i])
    }
    let retVal = sum & 0xff
    return UInt8(retVal)
}
Your checkSum method is largely right. If you want, you could simplify it to:
func checkSum(_ values: [UInt8]) -> UInt8 {
    let result = values.reduce(0) { ($0 + UInt32($1)) & 0xff }
    return UInt8(result)
}
You point out a web site that reports that the checksum8 of 06B127c5f59d is 35.
The problem is that your array has 27, not 0x27. If you are working with hexadecimal values, you need the 0x prefix on each value in your array literal (or, technically, at least on any value larger than 9).
So, consider:
let values: [UInt8] = [0x06, 0xB1, 0x27, 0xc5, 0xf5, 0x9d]
let result = checkSum(values)
That’s 53. If you want to see that in hexadecimal (like that site you referred to):
let hex = String(result, radix: 16)
That shows us that the checksum is 0x35 in hexadecimal.
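As an aside, since a checksum8 is just the sum modulo 256, the same thing can be written with Swift's wrapping addition operator. A minimal sketch (the function name is mine, to avoid clashing with the checkSum above):

func checkSum8(_ values: [UInt8]) -> UInt8 {
    // &+ wraps on overflow, which for UInt8 is exactly "sum modulo 256".
    values.reduce(0) { $0 &+ $1 }
}

let hexBytes: [UInt8] = [0x06, 0xB1, 0x27, 0xC5, 0xF5, 0x9D]
print(String(checkSum8(hexBytes), radix: 16)) // 35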

How to convert hex string to byte array in CAPL?

Consider having, for example, this kind of hex string:
char hex_str[100] = "0x01 0x03 0x04 0x0A";
How do I get the byte array representation of this string in CAPL, like:
byte hex_str_as_byte_arr[4] = {0x01, 0x03, 0x04, 0x0A};
EDIT: Only Vector CANoe supported data types/functions are allowed!
Use strtok to split the character array into separate hex strings, then use long strtol( const char *restrict str, char **restrict str_end, int base ) to convert each hex string to an integral value.
Thanks to all...
Actually, I've found a solution myself:
char hex_str[100] = "0x01 0x03 0x04 0x0A";
long data[4];
dword pos = 0;

pos = strtol(hex_str, pos, data[0]);
pos = strtol(hex_str, pos, data[1]);
pos = strtol(hex_str, pos, data[2]);
pos = strtol(hex_str, pos, data[3]);

write("0x%02x, 0x%02x, 0x%02x, 0x%02x", data[0], data[1], data[2], data[3]);
Now it's a simple cast: (byte) data[0]
We can use sscanf() to convert the numbers to unsigned char. In a loop, we'll need to also use a %n conversion to determine the reading position for the next iteration.
Here's a simple example (in real life, you'll need some range checking to make sure you don't overrun the output buffer):
#include <stdio.h>

int main(void)
{
    const char hex_str[100] = "0x01, 0x03, 0x04, 0x0A";
    unsigned char bytes[4];

    {
        int position;
        unsigned char *b = bytes;
        for (const char *input = hex_str; sscanf(input, "%hhi, %n", b, &position) == 1; ++b) {
            input += position;
        }
    }

    /* prove we did it */
    for (size_t i = 0; i < sizeof bytes; ++i) {
        printf("%hhu ", bytes[i]);
    }
    puts("");
}

Reading Data into a Struct in Swift

I'm trying to read bare Data into a Swift 4 struct using the withUnsafeBytes method. The network UDP packet has this format:
data: 0102 0A00 0000 0B00 0000
01 : 1 byte : majorVersion (decimal 01)
02 : 1 byte : minorVersion (decimal 02)
0A00 0000 : 4 bytes: applicationHostId (decimal 10)
0B00 0000 : 4 bytes: versionNumber (decimal 11)
Then I have an extension on Data that takes a start index and the number of bytes to read:
extension Data {
    func scanValue<T>(start: Int, length: Int) -> T {
        return self.subdata(in: start..<start+length).withUnsafeBytes { $0.pointee }
    }
}
This works correctly when reading the values one by one:
// correctly read as decimal "1"
let majorVersion: UInt8 = data.scanValue(start: 0, length: 1)
// correctly read as decimal "2"
let minorVersion: UInt8 = data.scanValue(start: 1, length: 1)
// correctly read as decimal "10"
let applicationHostId: UInt32 = data.scanValue(start: 2, length: 4)
// correctly read as decimal "11"
let versionNumber: UInt32 = data.scanValue(start: 6, length: 4)
Then I created a struct that represents the entire packet, as follows:
struct XPLBeacon {
    var majorVersion: UInt8       // 1 Byte
    var minorVersion: UInt8       // 1 Byte
    var applicationHostId: UInt32 // 4 Bytes
    var versionNumber: UInt32     // 4 Bytes
}
But when I read the data directly into the structure I have some issues:
var beacon: XPLBeacon = data.scanValue(start: 0, length: data.count)
// correctly read as decimal "1"
beacon.majorVersion
// correctly read as decimal "2"
beacon.minorVersion
// not correctly read
beacon.applicationHostId
// not correctly read
beacon.versionNumber
Is it supposed to work to parse an entire struct like this?
Reading the entire structure from the data does not work because the struct members are padded to their natural boundary. The memory layout of struct XPLBeacon is
A B x x C C C C D D D D
where
offset  member
0       A        - majorVersion (UInt8)
1       B        - minorVersion (UInt8)
2       x x      - padding
4       C C C C  - applicationHostId (UInt32)
8       D D D D  - versionNumber (UInt32)
and the padding is inserted so that the UInt32 members are aligned to memory addresses which are a multiple of their size. This is also confirmed by
print(MemoryLayout<XPLBeacon>.size) // 12
(For more information about alignment in Swift, see
Type Layout).
If you read the entire data into the struct then the 10 bytes are assigned as follows
01 02 0A 00 00 00 0B 00 00 00
A  B  x  x  C  C  C  C  D  D
which explains why majorVersion and minorVersion are correct, but applicationHostId and versionNumber are wrong (two of applicationHostId's bytes land in the padding, and the last two bytes of versionNumber are missing entirely). Reading all members separately from the data is the correct solution.
Since Swift 3, Data conforms to RandomAccessCollection, MutableCollection and RangeReplaceableCollection, so you can simply create a custom initializer to initialize your struct properties as follows:
struct XPLBeacon {
    let majorVersion, minorVersion: UInt8        // 1 + 1 = 2 Bytes
    let applicationHostId, versionNumber: UInt32 // 4 + 4 = 8 Bytes
    init(data: Data) {
        self.majorVersion = data[0]
        self.minorVersion = data[1]
        self.applicationHostId = data
            .subdata(in: 2..<6)
            .withUnsafeBytes { $0.load(as: UInt32.self) }
        self.versionNumber = data
            .subdata(in: 6..<10)
            .withUnsafeBytes { $0.load(as: UInt32.self) }
    }
}
var data = Data([0x01, 0x02, 0x0A, 0x00, 0x00, 0x00, 0x0B, 0x00, 0x00, 0x00])
print(data as NSData) // <01020a00 00000b00 0000>
let beacon = XPLBeacon(data: data)
beacon.majorVersion // 1
beacon.minorVersion // 2
beacon.applicationHostId // 10
beacon.versionNumber // 11
Following on Leo Dabus's answer, I created a slightly more readable constructor:
extension Data {
    func object<T>(at index: Index) -> T {
        subdata(in: index ..< index.advanced(by: MemoryLayout<T>.size))
            .withUnsafeBytes { $0.load(as: T.self) }
    }
}

struct XPLBeacon {
    var majorVersion: UInt8
    var minorVersion: UInt8
    var applicationHostId: UInt32
    var versionNumber: UInt32

    init(data: Data) {
        var index = data.startIndex
        majorVersion = data.object(at: index)
        index += MemoryLayout.size(ofValue: majorVersion)
        minorVersion = data.object(at: index)
        index += MemoryLayout.size(ofValue: minorVersion)
        applicationHostId = data.object(at: index)
        index += MemoryLayout.size(ofValue: applicationHostId)
        versionNumber = data.object(at: index)
    }
}
What is not part of this is, of course, checking the correctness of the data. As others mentioned in the comments, this could be done either by making the initializer failable or by throwing an error.
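For instance, a minimal sketch of the throwing variant (the error type and the initializer label are mine, not from the thread), guarding on the packet length before delegating to the initializer above:

enum PacketError: Error {
    case truncated(expected: Int, got: Int)
}

extension XPLBeacon {
    // Hypothetical convenience initializer: reject packets shorter
    // than the 10 bytes that the four fields above require.
    init(validating data: Data) throws {
        guard data.count >= 10 else {
            throw PacketError.truncated(expected: 10, got: data.count)
        }
        self.init(data: data)
    }
}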

How to use bit field with Swift to store values with more than 1 bit

In C I can do something like this:
struct byte_nibbles {
    unsigned char b1: 4;
    unsigned char b2: 4;
    unsigned char b3: 4;
    unsigned char b4: 4;
    unsigned char b5: 4;
    unsigned char b6: 4;
    unsigned char b7: 4;
    unsigned char b8: 4;
};

union {
    unsigned long var;
    struct byte_nibbles b;
} u;

int main(void)
{
    u.b.b1 = 0x01; u.b.b2 = 0x02; u.b.b3 = 0x03; u.b.b4 = 0x04;
    u.b.b5 = 0x05; u.b.b6 = 0x06; u.b.b7 = 0x07; u.b.b8 = 0x08;
    return 0;
}
So I can access specific parts of the byte_nibbles. Obviously this is just one example; it is possible to create bit fields of any size that fits in the basic types.
Despite my efforts and a lot of research, I could not figure out how to do this in Swift. I can use bitwise operations to get the same result, but it is not as readable and elegant.
Any ideas?
Swift simply does not support bit fields, so you can only
use the next larger integer type instead (in your case UInt8) and accept that the variables need more memory (a sketch of this follows below), or
use bit operations to access the different parts of the integer.
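A minimal sketch of the first approach (the struct name is mine), giving each 4-bit field a whole UInt8:

// Each nibble gets a full byte; wasteful, but simple and readable.
struct ByteNibbles {
    var b1: UInt8 = 0, b2: UInt8 = 0, b3: UInt8 = 0, b4: UInt8 = 0
    var b5: UInt8 = 0, b6: UInt8 = 0, b7: UInt8 = 0, b8: UInt8 = 0
}

var nibbles = ByteNibbles()
nibbles.b1 = 0x01
nibbles.b2 = 0x02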
For the second case you could define custom computed properties to ease
the access. As an example:
extension UInt8 {
    var lowNibble: UInt8 {
        get {
            return self & 0x0F
        }
        set(newValue) {
            self = (self & 0xF0) | (newValue & 0x0F)
        }
    }
    var highNibble: UInt8 {
        get {
            return (self & 0xF0) >> 4
        }
        set(newValue) {
            self = (self & 0x0F) | ((newValue & 0x0F) << 4)
        }
    }
}
var byte: UInt8 = 0
byte.lowNibble = 0x01
byte.highNibble = 0x02
print(byte.lowNibble)  // 1
print(byte.highNibble) // 2

Comparing set of bits in byte array

I have a byte array, as follows:
byte[] array = new byte[] { (byte) 0xAB, 0x7B, (byte) 0xF0, (byte) 0xEA, 0x04, 0x2E, (byte) 0xF3, (byte) 0xA9 };
The task is to find the number of occurrences of '0xA' in it.
Could you advise what to do? The answer is 6.
So from your comment, you want the total count of appearances of the bit pattern 1010 in the bytes in your array.
For a given byte b, the count is the sum of
(b & 0x0F) == 0x0A ? 1 : 0
(b & 0x1E) == 0x14 ? 1 : 0
(b & 0x3C) == 0x28 ? 1 : 0
(b & 0x78) == 0x50 ? 1 : 0
(b & 0xF0) == 0xA0 ? 1 : 0
(left as an exercise: what is this doing?)
Put this in a function, call it for each byte in the array, sum the results.
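A minimal Swift sketch of that idea (the helper name is mine), sliding a 4-bit window across the same five positions:

// Count occurrences of the bit pattern 1010 inside one byte by
// checking each of the five possible 4-bit windows.
func nibblePatternCount(in byte: UInt8) -> Int {
    (0...4).reduce(0) { count, shift in
        (byte >> shift) & 0x0F == 0x0A ? count + 1 : count
    }
}

let array: [UInt8] = [0xAB, 0x7B, 0xF0, 0xEA, 0x04, 0x2E, 0xF3, 0xA9]
let total = array.map { nibblePatternCount(in: $0) }.reduce(0, +)
print(total) // 6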
If you treat the entire array as a single bit-string, 0xAB, 0x7B, 0xF0, 0xEA, 0x04, 0x2E, 0xF3, 0xA9 is then:
10101011 01111011 11110000 11101010 00000100 00101110 11110011 10101001
====                         ====                              ====
  ====                         ====                              ====
This has 1010 occurring 6 times.
If you don't try to match across byte boundaries, you could try something like the following (tested in Perl and translated by hand):
int counter = 0;
for (int i = 0; i < array.length; ++i)
{
    for (int bits = 0xA0, mask = 0xF0; bits >= 0x0A; bits >>= 1, mask >>= 1)
    {
        if ((array[i] & mask) == bits)
            ++counter;
    }
}
To match across byte boundaries, you have to shift the bits in from the next byte. Try something like this (tested in Perl and translated by hand):
int counter = 0;
byte tester = array[0];
for (int i = 1; i < array.length + 1; ++i)
{
    byte nextByte = i < array.length ? array[i] : 0;
    for (int bit = 0; bit < 8; ++bit)
    {
        if ((tester & 0xF0) == 0xA0)
            ++counter;
        tester <<= 1;
        if ((nextByte & 0x80) != 0)
            tester |= 1;
        nextByte <<= 1;
    }
}
Both programs count 6 as there are no 1010 sequences across byte-boundaries in this example.