How to convert hex string to byte array in CAPL?

Considering having, for example, this type of hex string:
char hex_str[100] = "0x01 0x03 0x04 0x0A";
How can I get the byte array representation of this string in CAPL, like:
byte hex_str_as_byte_arr[4] = {0x01, 0x03, 0x04, 0x0A};
EDIT: Only data types/functions supported by Vector CANoe are allowed!

Use strtok to split the character array into separate hex tokens, then use strtol to convert each token to an integral value. (That is C's long strtol(const char *restrict str, char **restrict str_end, int base); CAPL provides its own strtol overloads, as the solution below shows.)
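In standard C, that looks like the sketch below (a minimal illustration only; CAPL's strtol overloads take different parameters, as the solution further down shows):
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void)
{
    char hex_str[100] = "0x01 0x03 0x04 0x0A";
    unsigned char bytes[4];
    size_t n = 0;

    /* strtok splits on spaces; strtol with base 16 accepts the "0x" prefix */
    for (char *tok = strtok(hex_str, " ");
         tok != NULL && n < sizeof bytes;
         tok = strtok(NULL, " ")) {
        bytes[n++] = (unsigned char)strtol(tok, NULL, 16);
    }

    for (size_t i = 0; i < n; ++i)
        printf("0x%02X ", bytes[i]);
    putchar('\n');
    return 0;
}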

Thanks to all...
Actually I've found a solution myself:
char hex_str[100] = "0x01 0x03 0x04 0x0A";
long data[4];
dword pos = 0;
pos = strtol(hex_str, pos, data[0]);
pos = strtol(hex_str, pos, data[1]);
pos = strtol(hex_str, pos, data[2]);
pos = strtol(hex_str, pos, data[3]);
write("0x%02x,0x%02x,0x%02x, 0x%02x", data[0], data[1], data[2], data[3]);
Now it's a simple cast: (byte) data[0]

We can use sscanf() to convert the numbers to unsigned char. In a loop, we'll also need a %n conversion to determine the reading position for the next iteration.
Here's a simple example (the loop condition includes a bound check so we don't overrun the output buffer):
#include <stdio.h>

int main(void)
{
    const char hex_str[100] = "0x01, 0x03, 0x04, 0x0A";
    unsigned char bytes[4];

    {
        int position;
        unsigned char *b = bytes;
        for (const char *input = hex_str;
             b < bytes + sizeof bytes
                 && sscanf(input, "%hhi, %n", b, &position) == 1;
             ++b) {
            input += position;
        }
    }

    /* prove we did it */
    for (size_t i = 0; i < sizeof bytes; ++i) {
        printf("%hhu ", bytes[i]);
    }
    puts("");
}

Related

Swift Hex string Decoding

I'm going insane trying to get this to work. I'm trying to duplicate the following Java function:
https://commons.apache.org/proper/commons-codec/apidocs/org/apache/commons/codec/binary/Hex.html#decodeHex-java.lang.String-
Here is the Java code:
public static byte[] decodeHex(final String data) throws DecoderException {
    return decodeHex(data.toCharArray());
}

public static byte[] decodeHex(final char[] data) throws DecoderException {
    final int len = data.length;
    if ((len & 0x01) != 0) {
        throw new DecoderException("Odd number of characters.");
    }
    final byte[] out = new byte[len >> 1];
    // two characters form the hex value.
    for (int i = 0, j = 0; j < len; i++) {
        int f = toDigit(data[j], j) << 4;
        j++;
        f = f | toDigit(data[j], j);
        j++;
        out[i] = (byte) (f & 0xFF);
    }
    return out;
}

protected static int toDigit(final char ch, final int index) throws DecoderException {
    final int digit = Character.digit(ch, 16);
    if (digit == -1) {
        throw new DecoderException("Illegal hexadecimal character " + ch + " at index " + index);
    }
    return digit;
}
Given the string 01e703000000000000, the byte array should contain
[1, -25, 3, 0, 0, 0, 0, 0, 0]
I thought this might be standard hex decoding, so I used the usual decoding function I've been using:
extension String {
    /// A data representation of the hexadecimal bytes in this string.
    var hexDecodedData: Data {
        // Get the UTF8 characters of this string
        let chars = Array(utf8)
        // Keep the bytes in an UInt8 array and later convert it to Data
        var bytes = [UInt8]()
        bytes.reserveCapacity(count / 2)
        // It is a lot faster to use a lookup map instead of strtoul
        let map: [UInt8] = [
            0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, // 01234567
            0x08, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 89:;<=>?
            0x00, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x00, // @ABCDEFG
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00  // HIJKLMNO
        ]
        // Grab two characters at a time, map them and turn it into a byte
        for i in stride(from: 0, to: count, by: 2) {
            let index1 = Int(chars[i] & 0x1F ^ 0x10)
            let index2 = Int(chars[i + 1] & 0x1F ^ 0x10)
            bytes.append(map[index1] << 4 | map[index2])
        }
        return Data(bytes)
    }
}
This results in
[1, 231, 3, 0, 0, 0, 0, 0, 0]
So then I tried converting the Java code to Swift myself:
extension String {
    // public static byte[] decodeHex(final char[] data) throws DecoderException
    func decodeHex() throws -> [Int] {
        let stringArray = Array(self)
        let len = count
        var out: [Int?] = Array(repeating: nil, count: len >> 1)
        if (len & 0x01) != 0 {
            throw HexDecodingError.oddNumberOfCharacters
        }
        var i = 0
        var j = 0
        while j < len {
            var f: Int = try Self.toDigit(char: String(stringArray[j]), index: j)
            j += 1
            f = f | (try Self.toDigit(char: String(stringArray[j]), index: j))
            j += 1
            out[i] = f & 0xFF
            i += 1
        }
        return out.compactMap { $0 }
    }

    enum HexDecodingError: Error {
        case oddNumberOfCharacters
        case illegalCharacter(String)
        case conversionToDigitFailed(String, Int)
    }

    static func toDigit(char: String, index: Int) throws -> Int {
        let digit = Int(char, radix: 16)
        if digit == -1 {
            throw HexDecodingError.illegalCharacter(char)
        }
        guard let digit = digit else {
            throw HexDecodingError.conversionToDigitFailed(char, index)
        }
        return digit
    }
}
Which results in
[1, 15, 3, 0, 0, 0, 0, 0, 0]
What is going on? What am I doing wrong?
EDIT:
Also, how can there possibly be a negative number in there, since a byte array is represented as [UInt8]?
The Java byte primitive type is an 8-bit signed two's complement integer. It has a minimum value of -128 and a maximum value of 127 (inclusive).
(Java, in general, does not have unsigned primitive types, only signed ones.)
In your Java output, the -25 value corresponds to hex E7 in your string, whose decimal value is E * 16 + 7 = 14 * 16 + 7 = 231; 231 is outside of the [-128, 127] range, and wraps around to -25. (More precisely the bit pattern of unsigned 8-bit 231 corresponds to the bit pattern of signed 8-bit -25 in two's-complement.)
In Swift, you're using a UInt8 to represent results (both explicitly, and implicitly in a Data value), and the range of UInt8 is [0, 255]; 231 fits within this range, and is what you see in your first Swift code snippet. The results are bitwise equivalent, but if you need results which are type-equivalent to what you're seeing in Java, you'll need to work in terms of Int8 (e.g. via Int8(bitPattern:)) instead of UInt8.
Incidentally, the reason your direct port prints [1, 15, 3, ...] is that it drops the << 4 shift from the Java code (int f = toDigit(data[j], j) << 4;): the two nibbles are OR'd together unshifted, so 0xE7 becomes 14 | 7 = 15 instead of 14 * 16 + 7 = 231.
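If it helps to see that wrap-around outside of Swift, here is a minimal C sketch (the cast relies on two's-complement representation, which every mainstream platform uses):
#include <stdio.h>
#include <stdint.h>

int main(void)
{
    uint8_t u = 0xE7;        /* 231, what Swift's UInt8 shows */
    int8_t  s = (int8_t)u;   /* -25, what Java's signed byte shows */
    printf("%d %d\n", u, s); /* prints: 231 -25 */
    return 0;
}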

snprintf() return value when size=0

Thanks first for your time spent here. I have a question about snprintf() when size=0, with the code below:
#include <stdio.h>
#include <stdlib.h>

int main(int ac, char **av)
{
    char *str;
    int len;

    len = snprintf(NULL, 0, "%s %d", *av, ac);
    printf("this string has length %d\n", len);
    if (!(str = malloc((len + 1) * sizeof(char))))
        return EXIT_FAILURE;
    len = snprintf(str, len + 1, "%s %d", *av, ac);
    printf("%s %d\n", str, len);
    free(str);
    return EXIT_SUCCESS;
}
when I run:
momo#xue5:~/TestCode$ ./Test_snprintf
The result is:
this string has length 17
./Test_snprintf 1 17
What confuses me is that in the first call the size to be written is 0, so why is 17 displayed?
What did I miss?
Thanks~~
The solution can be found in the man page under "Return value":
The functions snprintf() and vsnprintf() do not write more than size bytes (including the terminating null byte ('\0')). If the output was truncated due to this limit then the return value is the number of characters (excluding the terminating null byte) which would have been written to the final string if enough space had been available.
This is so that you can do exactly what you are doing: a "trial print" to get the correct length, then allocate the buffer dynamically and call snprintf again to write the whole output into the allocated buffer.
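That two-pass idiom can be wrapped in a small helper; here is a minimal sketch (the name aformat is made up for illustration):
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>

/* Allocate and format a string; the caller must free() it. Returns NULL on failure. */
char *aformat(const char *fmt, ...)
{
    va_list ap;
    int len;
    char *str;

    va_start(ap, fmt);
    len = vsnprintf(NULL, 0, fmt, ap);  /* pass 1: measure only */
    va_end(ap);
    if (len < 0)
        return NULL;

    str = malloc((size_t)len + 1);
    if (!str)
        return NULL;

    va_start(ap, fmt);
    vsnprintf(str, (size_t)len + 1, fmt, ap);  /* pass 2: actually write */
    va_end(ap);
    return str;
}
On glibc and the BSDs, the non-standard asprintf() does the same job in one call.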

How to store int in char * in iphone

Can anyone help with converting an int to a char array? I have a buffer declared as:
char *buffer = NULL;
int lengthOfComponent = -1;
char *obj;
buffer[index]= (char *)&lengthOfComponent;
If I do this it throws EXC_BAD_ACCESS during execution. How do I store the value of obj in the buffer using memcpy?
Of course you cannot write to buffer[index]; it is not allocated!
buffer = malloc(sizeof(char) * lengthOfBuffer);
should do it. After that you can write to the buffer with memcpy or with an assignment, like you are doing.
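For instance, here is a minimal sketch of the memcpy route (the function name store, the buffer size 64, and the offset index are made-up values for illustration):
#include <stdlib.h>
#include <string.h>

void store(void)
{
    int lengthOfComponent = -1;
    size_t index = 0;             /* offset to write at */
    char *buffer = malloc(64);    /* allocate first! */

    if (buffer != NULL) {
        /* copy the int's raw bytes into the buffer at offset index */
        memcpy(buffer + index, &lengthOfComponent, sizeof lengthOfComponent);
        free(buffer);
    }
}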
buffer[index] = (char *)&lengthOfComponent;
buffer[index] is like dereferencing the pointer. But buffer is not pointing to any valid location. Hence the runtime error.
The C solution is to use snprintf. Try:
int i = 11;
char buffer[10];
snprintf(buffer, sizeof(buffer), "%d", i);

Xor between 2 NSString gives wrong result

I have this method to XOR two NSStrings. I'm printing the result with NSLog, but it isn't what I expect.
I can't figure out what I'm doing wrong.
- (void)XorSecretKeyDeviceId
{
    NSString* secretKey = @"123";
    NSString* deviceId = @"abcdef";
    NSData* stringKey = [secretKey dataUsingEncoding:NSUTF8StringEncoding];
    NSData* stringDeviceId = [deviceId dataUsingEncoding:NSUTF8StringEncoding];
    unsigned char* pBytesInput = (unsigned char*)[stringKey bytes]; // Bytes
    unsigned char* pBytesKey = (unsigned char*)[stringDeviceId bytes];
    unsigned int vlen = [secretKey length]; // Keys Length
    unsigned int klen = [deviceId length];
    unsigned int v;
    unsigned int k = vlen % klen;
    unsigned char c;

    for (v = 0; v < vlen; v++)
    {
        c = pBytesInput[v] ^ pBytesKey[k];
        pBytesInput[v] = c;
        NSLog(@"%c", c);
        k = (++k < klen ? k : 0);
    }
}
Are you setting your pBytesInput and pBytesKey variables correctly? At the moment, you have unsigned char* pBytesInput = (unsigned char*)[stringKey bytes]; (i.e. the input is the "key"), and pBytesKey is the device ID. This seems odd.
Also, be careful using UTF-8 encoding. UTF-8 uses the high bit on any byte in the string to indicate a "continuation" of a multi-byte character into the next byte. Your XOR could plausibly generate invalid UTF-8 by setting the high bit of a byte in the output, so treat the result as raw bytes rather than a string.
For more than that, you'll have to say what the "wrong result" is.
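For reference, here is a minimal C sketch of repeating-key XOR with the roles the usual way around (the device ID is the data, the secret key repeats over it); the output is raw bytes, so it is logged as hex rather than as characters:
#include <stdio.h>
#include <string.h>

int main(void)
{
    const char *input = "abcdef";  /* device ID: the data to encrypt */
    const char *key   = "123";     /* secret key, repeated cyclically */
    size_t ilen = strlen(input);
    size_t klen = strlen(key);
    unsigned char out[64];

    for (size_t i = 0; i < ilen && i < sizeof out; i++) {
        out[i] = (unsigned char)(input[i] ^ key[i % klen]);
        printf("%02X ", out[i]);
    }
    putchar('\n');
    return 0;
}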

Writing BMP files (Platform Independent)

I have a structure such as
typedef struct FT_Bitmap_
{
    int rows;
    int width;
    int pitch;
    unsigned char* buffer;
    short num_grays;
    char pixel_mode;
    char palette_mode;
    void* palette;
} FT_Bitmap;
defining my bitmap data. I want to create a valid BMP file from this structure. How can I do that?
Take a look at:
http://en.wikipedia.org/wiki/BMP_file_format
Write out the header, the palette and the data.
Just take care when you write the bitmap data. It's "upside-down" - the first pixel in data is the left-bottom corner pixel.
First, declare three typedefs you will use throughout. Fixed-width types from <stdint.h> keep the struct layout identical on every platform (unsigned long, for instance, is 8 bytes on most 64-bit Unix systems, which would corrupt the header):
#include <stdint.h>

typedef uint8_t  byte;
typedef uint16_t word;
typedef uint32_t dword;
As for the structs, I suggest using three, one for each element of the BMP:
typedef struct BITMAPFILEHEADER  /* the file header proper; the "BM" magic is written separately */
{
    dword bmp_size;
    word bmp_app1;
    word bmp_app2;
    dword bmp_offset;
} BMP;

typedef struct DIB_HEADER
{
    dword dib_size;
    dword dib_w;
    dword dib_h;
    word dib_planes;
    word dib_bits;
    dword dib_compression;
    dword dib_rawsize;
    dword dib_xres;
    dword dib_yres;
    dword dib_palette;
    dword dib_important;
} DIB;

typedef struct PIXEL_ARRAY
{
    byte B;
    byte G;
    byte R;
} PIX;
Then you can manipulate the image in many ways. You can make a 1D/2D array to hold the pixel data, or just write the BMP directly. The following code writes a 500x460 solid-color BMP directly:
#include <stdio.h>

void new(char NAME[], byte RED, byte GREEN, byte BLUE)
{
    char build_name[256];
    const char* id = "BM";
    int i, j;
    FILE* fp;
    dword rsize = (500 * sizeof(PIX) + 3) / 4 * 4;  /* rows are padded to a multiple of 4 bytes */
    dword pad = rsize - 500 * sizeof(PIX);
    dword rawsize = rsize * 460;
    byte zero[3] = {0};
    BMP bmp = { 2 + sizeof(BMP) + sizeof(DIB) + rawsize, 0, 0, 2 + sizeof(BMP) + sizeof(DIB) };  /* 2 bytes for "BM" */
    DIB dib = { sizeof(DIB), 500, 460, 1, 24, 0, rawsize, 2835, 2835, 0, 0 };
    PIX pix;

    pix.R = RED;
    pix.G = GREEN;
    pix.B = BLUE;

    snprintf(build_name, sizeof(build_name), "%s.bmp", NAME);
    fp = fopen(build_name, "wb");
    if (!fp) return;  /* access denied or similar */

    fwrite(id, 1, 2, fp);
    fwrite(&bmp, 1, sizeof(bmp), fp);
    fwrite(&dib, 1, sizeof(dib), fp);
    for (i = 0; i < 460; i++)
    {
        for (j = 0; j < 500; j++)
        {
            fwrite(&pix, sizeof(pix), 1, fp);
        }
        if (pad) fwrite(zero, 1, pad, fp);
    }
    fclose(fp);
}
The point is that you can then use fseek() to locate a pixel within the file and fwrite() to overwrite it directly.
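For example, calling the function above like this writes a solid red 500x460 image to red.bmp:
new("red", 255, 0, 0);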