how to use SHA256 with salt(some key) in swift - swift

I found we can hash some string with CommonCrypto.
and I see some examples but they don't use salt.
How can I use SHA256 with a salt?

Complete solution for Swift 4:
extension Data {
    /// Lowercase hex representation of the bytes (two digits per byte).
    var hexString: String {
        return map { String(format: "%02hhx", $0) }.joined()
    }

    /// SHA-256 digest of the data (32 bytes), computed with CommonCrypto.
    var sha256: Data {
        var digest = [UInt8](repeating: 0, count: Int(CC_SHA256_DIGEST_LENGTH))
        // Swift 5: withUnsafeBytes hands the closure an UnsafeRawBufferPointer;
        // pass its baseAddress rather than the buffer itself (the old
        // pointer-taking overload is deprecated). baseAddress is nil only for
        // empty data, which CC_SHA256 accepts when the length is 0.
        self.withUnsafeBytes { buffer in
            _ = CC_SHA256(buffer.baseAddress, CC_LONG(self.count), &digest)
        }
        // Data(bytes:) is deprecated; Data(_:) takes the byte array directly.
        return Data(digest)
    }
}
extension String {
    /// SHA-256 digest of `self` concatenated with `salt` (UTF-8 encoded).
    /// Note: appending a salt is NOT an HMAC; use CCHmac / CryptoKit's HMAC
    /// when a keyed MAC is required.
    func sha256(salt: String) -> Data {
        let salted = self + salt
        // UTF-8 encoding of a Swift String cannot fail.
        let bytes = salted.data(using: .utf8)!
        return bytes.sha256
    }
}
Example:
let hash = "test".sha256(salt: "salt").hexString

Combine your input data with a salt and run the hash calculation:
/// Placeholder that concatenates `input` and `salt`.
/// TODO: hash `toHash` with SHA-256 (e.g. CryptoKit's `SHA256.hash(data:)`)
/// and return the digest instead of the raw concatenation.
func hash(input: String, salt: String) -> String {
    let toHash = input + salt
    // Return the combined input until the hashing step is implemented.
    return toHash
}
// The original call omitted the `input:` argument label and would not compile.
print(hash(input: "somedata", salt: "1m8f")) // Prints "somedata1m8f"

Related

Get just the hash from SHA256.hash(data:)?

I'm trying to get just a hash from SHA256.hash(data:) but in order to do this I need to grab the description and then use .replacingOccurrences(of: "SHA256 digest: ", with: ""). Is there a way that I can just get the full SHA256 hash as a string?
/// Hex-encoded SHA-256 of `input`'s string interpolation.
/// Builds the hex string directly from the digest bytes instead of parsing
/// `description` — the "SHA256 digest: " prefix is an implementation detail
/// of CryptoKit and may change.
func getId<T>(input: T) -> String {
    let data = Data("\(input)".utf8)
    return SHA256.hash(data: data)
        .map { String(format: "%02x", $0) }
        .joined()
}
You can simply map SHA256Digest bytes into an hexa string:
import CryptoKit
extension UnsignedInteger where Self: CVarArg {
    /// Zero-padded lowercase hex, bitWidth/4 digits (e.g. 2 for UInt8).
    /// The original format string "%ll*0x" was garbled and is not a valid
    /// printf conversion; interpolate the width into the format instead.
    // NOTE(review): %x formats the default-promoted value; this is intended
    // for the UInt8 digest bytes used below — confirm before using with
    // 64-bit integers.
    var hexa: String { .init(format: "%0\(bitWidth / 4)x", self) }
}
extension DataProtocol {
    /// CryptoKit SHA-256 digest of these bytes.
    var sha256Digest: SHA256Digest {
        return SHA256.hash(data: self)
    }

    /// The digest as raw Data (32 bytes).
    var sha256Data: Data {
        return Data(sha256Digest)
    }

    /// The digest as a lowercase hex string.
    var sha256Hexa: String {
        return sha256Digest.reduce(into: "") { $0 += $1.hexa }
    }
}
/// Hex-encoded SHA-256 of `data`.
func getId<D: DataProtocol>(data: D) -> String {
    return data.sha256Hexa
}

HMAC SHA 256 returns different value from javascript

I need to generate the same hash value as the site below.
https://cryptii.com/pipes/hmac
If the key is aaaa and the message is a
The expected hash value is e29f14beeb21a8ee1d1c3b8c2be4cf440584da4d46aff5bacb2ae9aa7deb3271.
But the result is 8c21ecf95763195811ac0513bfa42a29be13b9d895b896af45e115dde9bc7382
Below is my code I don't know what's wrong.
I have already spent six hours on this, so any help would be greatly appreciated.
import CommonCrypto
import CryptoKit
/// Hash algorithms supported by CommonCrypto's HMAC API.
enum CryptoAlgorithm {
    case MD5, SHA1, SHA224, SHA256, SHA384, SHA512

    /// The CommonCrypto HMAC algorithm constant for this case.
    var HMACAlgorithm: CCHmacAlgorithm {
        switch self {
        case .MD5:    return CCHmacAlgorithm(kCCHmacAlgMD5)
        case .SHA1:   return CCHmacAlgorithm(kCCHmacAlgSHA1)
        case .SHA224: return CCHmacAlgorithm(kCCHmacAlgSHA224)
        case .SHA256: return CCHmacAlgorithm(kCCHmacAlgSHA256)
        case .SHA384: return CCHmacAlgorithm(kCCHmacAlgSHA384)
        case .SHA512: return CCHmacAlgorithm(kCCHmacAlgSHA512)
        }
    }

    /// Digest length in bytes for this algorithm.
    var digestLength: Int {
        switch self {
        case .MD5:    return Int(CC_MD5_DIGEST_LENGTH)
        case .SHA1:   return Int(CC_SHA1_DIGEST_LENGTH)
        case .SHA224: return Int(CC_SHA224_DIGEST_LENGTH)
        case .SHA256: return Int(CC_SHA256_DIGEST_LENGTH)
        case .SHA384: return Int(CC_SHA384_DIGEST_LENGTH)
        case .SHA512: return Int(CC_SHA512_DIGEST_LENGTH)
        }
    }
}
extension String {
    /// HMAC of `self` keyed with `key` (both UTF-8 encoded), as lowercase hex.
    /// - Parameters:
    ///   - algorithm: which CommonCrypto HMAC variant to use.
    ///   - key: the raw key text; it is used byte-for-byte (do not pre-encode).
    func hmac(algorithm: CryptoAlgorithm, key: String) -> String {
        // The original allocated an UnsafeMutablePointer and then called
        // deallocate() BEFORE deinitialize(count:) — touching freed memory.
        // A Swift array removes the manual pointer management entirely.
        let messageBytes = Array(self.utf8)
        let keyBytes = Array(key.utf8)
        var digest = [UInt8](repeating: 0, count: algorithm.digestLength)
        CCHmac(algorithm.HMACAlgorithm,
               keyBytes, keyBytes.count,
               messageBytes, messageBytes.count,
               &digest)
        return digest.map { String(format: "%02x", $0) }.joined()
    }
}
public extension Data {
/**
Method creates bytes array from given Data
- Returns: Array of bytes
*/
// Reinterprets this Data's raw bytes as `count` elements of T.
// NOTE(review): the array is sized by byte count but filled via NSData
// getBytes with length count * sizeof(T); for any T wider than one byte the
// requested length exceeds the data's own length, leaving trailing elements
// at their zero initializer — confirm T is a byte-sized type (e.g. UInt8)
// at every call site.
func bytesArray<T: ExpressibleByIntegerLiteral>() -> [T] {
var bytes = Array<T>(repeating: 0, count: self.count)
(self as NSData).getBytes(&bytes, length:self.count * MemoryLayout<T>.size)
return bytes
}
}
public extension String {
    /**
     Creates a bytes array from this String's UTF-8 encoding.
     - Returns: Array of bytes
     */
    func bytesArray<T: ExpressibleByIntegerLiteral>() -> [T] {
        // UTF-8 encoding of a Swift String cannot fail, so the unwrap is safe.
        return self.data(using: String.Encoding.utf8)!.bytesArray()
    }
}
extension Data {
    /// Options controlling hex output.
    struct HexEncodingOptions: OptionSet {
        let rawValue: Int
        /// Emit uppercase hex digits (default is lowercase).
        static let upperCase = HexEncodingOptions(rawValue: 1 << 0)
    }

    /// Hex string of the bytes, two digits per byte.
    func hexEncodedString(options: HexEncodingOptions = []) -> String {
        let format: String
        if options.contains(.upperCase) {
            format = "%02hhX"
        } else {
            format = "%02hhx"
        }
        var encoded = ""
        for byte in self {
            encoded += String(format: format, byte)
        }
        return encoded
    }

    /// Lowercase hex string of the bytes.
    var hexDescription: String {
        return map { String(format: "%02x", $0) }.joined()
    }
}
// To match https://cryptii.com/pipes/hmac with key "aaaa" and message "a",
// pass the key's raw text. The original hex-encoded the key first
// ("aaaa" -> "61616161"), which keys the HMAC with different bytes — that is
// why the computed digest differed from the site's.
print("a".hmac(algorithm: .SHA256, key: "aaaa"))
// Expected: e29f14beeb21a8ee1d1c3b8c2be4cf440584da4d46aff5bacb2ae9aa7deb3271

How to convert binary representation string of 0/1 to string?

I want to convert a String to binary (0/1 representation) and back again.
This is my code for converting String to binary.
// Read the text to convert (UI_Data is presumably a text-field outlet
// defined outside this snippet — TODO confirm).
let String_Data: String = UI_Data.text!
// UTF-8 encoding of a Swift String cannot fail, so the force-unwrap is safe;
// the optional type on the left preserves the original declaration.
let Binary_Data: Data? = String_Data.data(using: .utf8, allowLossyConversion: false)!
// Concatenate each byte's binary digits.
// NOTE(review): String(byte, radix: 2) emits no leading zeros, so byte
// boundaries are lost and the result cannot be decoded back — pad each byte
// to 8 digits if a round trip is needed.
let String_Binary_Data = Binary_Data?.reduce("") { (acc, byte) -> String in
acc + String(byte, radix: 2)
}
But I do not know how to do the opposite. I would be very grateful if you could give me advice for this.
I would start with something like this, although the performance probably isn't spectacular because it involves so many small intermediate strings.
import Foundation
extension UInt8 {
    /// The byte as exactly 8 binary digits, zero-padded.
    var binaryString: String {
        let digits = String(self, radix: 2)
        // Pad by the number of MISSING digits rather than leadingZeroBitCount:
        // for 0, leadingZeroBitCount is 8 while String(0, radix: 2) is "0",
        // so the original produced 9 characters ("000000000") for zero bytes.
        return String(repeating: "0", count: 8 - digits.count) + digits
    }
}
extension Data {
    /// Concatenation of every byte's 8-digit binary representation.
    var binaryString: String {
        var result = ""
        for byte in self {
            result += byte.binaryString
        }
        return result
    }
}
// Demo: print the alphabet's UTF-8 bytes as one long bit string.
let sample = "abcdefghijklmnopqrstuvwxyz"
let sampleData = sample.data(using: .utf8)!
print(sampleData.binaryString)

HMAC SHA512 using CommonCrypto in Swift 3.1 [duplicate]

This question already has answers here:
CommonHMAC in Swift
(11 answers)
Closed 5 years ago.
I'm trying to encrypt data to send to the API.
The API requires the data to be sent as hmac_sha512 encrypted hash.
I've found various examples of how it possibly could have been done for sha1 and others (not sha512) and also in older versions of Swift.
None of the examples that I tried work for swift 3.1
Any help in the right direction will be appreciated.
Edit:
In PHP, I successfully send it using:
$sign = hash_hmac('sha512', $post_data, $this->secret);
Edit 2:
I did add a bridging header, but I don't know what to do next — the code examples that followed don't work for Swift 3.1. :(
Edit 3:
Solved! It turns out I was creating the bridging header incorrectly.
P.S I'm trying to avoid CryptoSwift, focusing on CommonCrypto.
The answer given below is not proper, as it doesn't allow hmac to get a key for encryption. I did research and finally got it working. This post contains the working example project for hmac: https://github.com/nabtron/hmacTest
I think the best thing to do is using Crypto pod which is a wrapper for common crypto. In case you want to use directly commonCrypto you should add the bridging header to the project and import common crypto using: #import <CommonCrypto/CommonCrypto.h>
Edit 1
Create a swift class and add the following code to it:
import Foundation
extension String {
    // NOTE(review): despite the type name, HMAC.hash computes plain (unkeyed)
    // digests via CC_MD5/CC_SHA* — these properties are simple hashes,
    // not keyed HMACs.

    /// MD5 digest of the UTF-8 bytes, lowercase hex.
    var md5: String { return HMAC.hash(inp: self, algo: .MD5) }

    /// SHA-1 digest of the UTF-8 bytes, lowercase hex.
    var sha1: String { return HMAC.hash(inp: self, algo: .SHA1) }

    /// SHA-224 digest of the UTF-8 bytes, lowercase hex.
    var sha224: String { return HMAC.hash(inp: self, algo: .SHA224) }

    /// SHA-256 digest of the UTF-8 bytes, lowercase hex.
    var sha256: String { return HMAC.hash(inp: self, algo: .SHA256) }

    /// SHA-384 digest of the UTF-8 bytes, lowercase hex.
    var sha384: String { return HMAC.hash(inp: self, algo: .SHA384) }

    /// SHA-512 digest of the UTF-8 bytes, lowercase hex.
    var sha512: String { return HMAC.hash(inp: self, algo: .SHA512) }
}
public struct HMAC {
    // NOTE(review): despite the name, this type computes plain (unkeyed)
    // digests with CC_MD5/CC_SHA* — there is no key parameter, so it is not
    // an HMAC. Kept as-is for source compatibility with callers.

    /// Lowercase hex digest of `inp`'s UTF-8 bytes using `algo`.
    /// Returns "" if the string cannot be UTF-8 encoded (which cannot happen
    /// for Swift strings, so this is effectively total).
    static func hash(inp: String, algo: HMACAlgo) -> String {
        if let stringData = inp.data(using: String.Encoding.utf8, allowLossyConversion: false) {
            return hexStringFromData(input: digest(input: stringData as NSData, algo: algo))
        }
        return ""
    }

    /// Runs the CommonCrypto one-shot digest for `algo` over `input`.
    private static func digest(input: NSData, algo: HMACAlgo) -> NSData {
        let digestLength = algo.digestLength()
        var hash = [UInt8](repeating: 0, count: digestLength)
        // Swift switch cases never fall through, so the original `break`
        // statements were redundant and have been removed.
        switch algo {
        case .MD5:    CC_MD5(input.bytes, CC_LONG(input.length), &hash)
        case .SHA1:   CC_SHA1(input.bytes, CC_LONG(input.length), &hash)
        case .SHA224: CC_SHA224(input.bytes, CC_LONG(input.length), &hash)
        case .SHA256: CC_SHA256(input.bytes, CC_LONG(input.length), &hash)
        case .SHA384: CC_SHA384(input.bytes, CC_LONG(input.length), &hash)
        case .SHA512: CC_SHA512(input.bytes, CC_LONG(input.length), &hash)
        }
        return NSData(bytes: hash, length: digestLength)
    }

    /// Hex-encodes every byte of `input` as two lowercase digits.
    private static func hexStringFromData(input: NSData) -> String {
        var bytes = [UInt8](repeating: 0, count: input.length)
        input.getBytes(&bytes, length: input.length)
        return bytes.map { String(format: "%02x", $0) }.joined()
    }
}
/// Digest algorithms, bridged to CommonCrypto's length constants.
enum HMACAlgo {
    case MD5, SHA1, SHA224, SHA256, SHA384, SHA512

    /// Digest size in bytes for this algorithm.
    func digestLength() -> Int {
        switch self {
        case .MD5:    return Int(CC_MD5_DIGEST_LENGTH)
        case .SHA1:   return Int(CC_SHA1_DIGEST_LENGTH)
        case .SHA224: return Int(CC_SHA224_DIGEST_LENGTH)
        case .SHA256: return Int(CC_SHA256_DIGEST_LENGTH)
        case .SHA384: return Int(CC_SHA384_DIGEST_LENGTH)
        case .SHA512: return Int(CC_SHA512_DIGEST_LENGTH)
        }
    }
}
then use it simply by stringName.sha512
this class extends the String class which gives the ability to use the hashing as a function in the string class.

Using MD5 hash value on an ansi string in Swift 3

I have a small function that takes a string and returns its MD5 hash value. The problem is, that it expects a UTF8 string, and I need it to calculate a hash value of a byte array encoded with iso-8859-1 (~ansi).
How can I change the following code to accept a byte array of characters, then return its hashed value?
/// MD5 of the string's UTF-8 bytes, as lowercase hex.
/// - Parameter string: the text to hash (UTF-8 encoded).
static func md5(_ string: String) -> String {
    // Use a value-typed context instead of manual pointer allocation:
    // the original's context.deallocate(capacity: 1) is no longer available
    // (Swift 4+ removed the argument) and the pointer was never deinitialized.
    var context = CC_MD5_CTX()
    var digest = Array<UInt8>(repeating: 0, count: Int(CC_MD5_DIGEST_LENGTH))
    CC_MD5_Init(&context)
    // Hash explicit UTF-8 bytes rather than relying on implicit
    // String-to-C-string bridging in the CC_MD5_Update call.
    let bytes = Array(string.utf8)
    CC_MD5_Update(&context, bytes, CC_LONG(bytes.count))
    CC_MD5_Final(&digest, &context)
    return digest.map { String(format: "%02x", $0) }.joined()
}
The tricky part is the CC_MD5_Update call. Thanks.
You can easily modify your function to take an arbitrary byte
array as argument. CC_MD5_Update is mapped to Swift as
func CC_MD5_Update(_ c: UnsafeMutablePointer<CC_MD5_CTX>!, _ data: UnsafeRawPointer!, _ len: CC_LONG) -> Int32
and you can pass an array as the UnsafeRawPointer parameter:
/// MD5 of the given byte array, as lowercase hex.
func md5(bytes: [UInt8]) -> String {
    var ctx = CC_MD5_CTX()
    var out = [UInt8](repeating: 0, count: Int(CC_MD5_DIGEST_LENGTH))
    CC_MD5_Init(&ctx)
    // Arrays bridge to the UnsafeRawPointer parameter of CC_MD5_Update.
    CC_MD5_Update(&ctx, bytes, CC_LONG(bytes.count))
    CC_MD5_Final(&out, &ctx)
    var hex = ""
    for b in out {
        hex += String(format: "%02hhx", b)
    }
    return hex
}
(I have also simplified it a bit.)
Alternatively, pass a Data argument:
/// MD5 of `data`, as lowercase hex (Swift 3/4 variant).
func md5(data: Data) -> String {
var context = CC_MD5_CTX()
var digest = Array<UInt8>(repeating:0, count:Int(CC_MD5_DIGEST_LENGTH))
CC_MD5_Init(&context)
// Swift 3/4: the closure's $0 bridges directly to UnsafeRawPointer.
// In Swift 5 this overload is deprecated — pass $0.baseAddress from the
// raw-buffer closure instead (see the Swift 5 version later in this answer).
data.withUnsafeBytes {
_ = CC_MD5_Update(&context, $0, CC_LONG(data.count))
}
CC_MD5_Final(&digest, &context)
return digest.map { String(format: "%02hhx", $0) }.joined()
}
which can then be used as
// Example: hash the Latin-1 encoding of a string.
let s = "foo"
// isoLatin1 encoding can fail for characters outside Latin-1, hence if-let.
if let data = s.data(using: .isoLatin1) {
let hash = md5(data: data)
print(hash)
}
Update for Swift 5:
import CommonCrypto
/// MD5 of `data`, as lowercase hex (Swift 5: uses the raw-buffer overload
/// of withUnsafeBytes and passes baseAddress to CommonCrypto).
/// NOTE(review): MD5 is cryptographically broken — acceptable for checksums,
/// not for security-sensitive uses.
func md5(data: Data) -> String {
    var ctx = CC_MD5_CTX()
    var out = [UInt8](repeating: 0, count: Int(CC_MD5_DIGEST_LENGTH))
    CC_MD5_Init(&ctx)
    data.withUnsafeBytes { raw in
        _ = CC_MD5_Update(&ctx, raw.baseAddress, CC_LONG(data.count))
    }
    CC_MD5_Final(&out, &ctx)
    var hex = ""
    for b in out {
        hex += String(format: "%02hhx", b)
    }
    return hex
}
If you are sure your 'string' contains only utf8 characters, call CC_MD5_Update with string.utf8 so:
CC_MD5_Update(context, string.utf8, CC_LONG(string.lengthOfBytes(using: String.Encoding.utf8)))
Strings in swift are 'interesting', this is a good read on the topic: https://oleb.net/blog/2016/08/swift-3-strings/
// requires a bridging header with this:
// #import <CommonCrypto/CommonCrypto.h>
/// MD5 of the string's UTF-8 bytes as lowercase hex, or nil if encoding
/// fails (UTF-8 encoding of a Swift String cannot fail, but nil honors the
/// optional signature).
func MD5(_ string: String) -> String? {
    let length = Int(CC_MD5_DIGEST_LENGTH)
    var digest = [UInt8](repeating: 0, count: length)
    // The original returned the hex of an untouched all-zero buffer when
    // data(using:) failed; return nil instead of a bogus digest.
    guard let d = string.data(using: String.Encoding.utf8) else { return nil }
    // Swift 5: the typed UnsafePointer<UInt8> closure overload is deprecated;
    // use the raw-buffer overload and pass baseAddress.
    d.withUnsafeBytes { buffer in
        _ = CC_MD5(buffer.baseAddress, CC_LONG(d.count), &digest)
    }
    return digest.map { String(format: "%02x", $0) }.joined()
}
Justin Answer : https://gist.github.com/jstn/787da74ab4be9d4cf3cb