I am reading FIPS 180-4 and trying to implement SHA-256 on my own. On page 15, in section 5.3.3 SHA-256, the initial hash values are defined:
5.3.3 SHA-256
For SHA-256, the initial hash value, H(0), shall consist of the following eight 32-bit words, in hex:
H0 = 6a09e667
H1 = bb67ae85
H2 = 3c6ef372
H3 = a54ff53a
H4 = 510e527f
H5 = 9b05688c
H6 = 1f83d9ab
H7 = 5be0cd19
These words were obtained by taking the first thirty-two bits of the fractional parts of the
square roots of the first eight prime numbers.
So I started calculating these numbers on my own, and that is where my question appeared. According to Wikipedia (Double-precision floating-point format), a double is laid out in binary with the fractional part in bits 0-51, the exponent in bits 52-62, and the sign in bit 63. So, to obtain the initial hash values, I take the first 32 bits of the fractional part, i.e. the 32 most significant fraction bits (bits 51 down to 20):
H0 = 6a09e667 (hex)
sqrt(2) = 1.4142135623730951 = [0][01111111111][01101010000010011110011001100111|11110011101111001101]
H0 = 01101010000010011110011001100111 (binary)
H0'= 01101010000010011110011001100111 (binary)
H0 == H0' -> OK
-------------------
H1 = bb67ae85 (hex)
sqrt(3) = 1.7320508075688772 = [0][01111111111][10111011011001111010111010000101|10000100110010101010]
H1 = 10111011011001111010111010000101 (binary)
H1'= 10111011011001111010111010000101 (binary)
H1 == H1' -> OK
-------------------
H2 = 3c6ef372 (hex)
sqrt(5) = 2.23606797749979 = [0][10000000000][00011110001101110111100110111001|01111111010010101000]
H2 = 00111100011011101111001101110010 (binary)
H2'= 00011110001101110111100110111001 (binary)
H2 != H2' -> SHIFTED!
-------------------
H3 = a54ff53a (hex)
sqrt(7) = 2.6457513110645907 = [0][10000000000][01010010101001111111101010011101|00101111100011101010]
H3 = 10100101010011111111010100111010 (binary)
H3'= 01010010101001111111101010011101 (binary)
H3 != H3' -> SHIFTED!
-------------------
H4 = 510e527f (hex)
sqrt(11) = 3.3166247903554 = [0][10000000000][10101000100001110010100100111111|11010110111100110100]
H4 = 01010001000011100101001001111111 (binary)
H4'= 10101000100001110010100100111111 (binary)
H4 != H4' -> SHIFTED!
-------------------
H5 = 9b05688c (hex)
sqrt(13) = 3.605551275463989 = [0][10000000000][11001101100000101011010001000110|00010101100111110011]
H5 = 10011011000001010110100010001100 (binary)
H5'= 11001101100000101011010001000110 (binary)
H5 != H5' -> SHIFTED!
-------------------
H6 = 1f83d9ab (hex)
sqrt(17) = 4.123105625617661 = [0][10000000001][00000111111000001111011001101010|11111110110100000111]
H6 = 00011111100000111101100110101011 (binary)
H6'= 00000111111000001111011001101010 (binary)
H6 != H6' -> SHIFTED!
-------------------
H7 = 5be0cd19 (hex)
sqrt(19) = 4.358898943540674 = [0][10000000001][00010110111110000011001101000110|01000100110111111001]
H7 = 01011011111000001100110100011001 (binary)
H7'= 00010110111110000011001101000110 (binary)
H7 != H7' -> SHIFTED!
-------------------
The question is: why do some of the binary representations of H given in FIPS 180-4 appear to be shifted? I think I am taking the fractional parts correctly, but if not, what am I doing wrong?
The code I wrote is below:
package ru.iulgutlin.sha256;
import java.util.Arrays;
import java.util.Objects;
/**
* Test
*
* @author Rail Iulgutlin 1/9/22
*/
public class Test {
private static final long[] PRIMES = new long[64];
/**
* Given H values
*/
private static final long h0 = 0x6a09e667L;
private static final long h1 = 0xbb67ae85L;
private static final long h2 = 0x3c6ef372L;
private static final long h3 = 0xa54ff53aL;
private static final long h4 = 0x510e527fL;
private static final long h5 = 0x9b05688cL;
private static final long h6 = 0x1f83d9abL;
private static final long h7 = 0x5be0cd19L;
private static final long[] h = {h0, h1, h2, h3, h4, h5, h6, h7};
static {
initPrimes();
}
public static void main(String[] args) {
long[] s = new long[8];
for (int i = 0; i < s.length; i++) {
/**
* Calculating my own H' values and comparing with given
*/
double sqrt = Math.sqrt(PRIMES[i]);
s[i] = firstFractionalParts(sqrt, 32);
String given = binary(h[i], 32); // given H value as a binary string
String calculated = binary(s[i], 32); // calculated H' value as a binary string
boolean eq = Objects.equals(given, calculated);
// print the result, pretty formatted
print(" H" + i + " = " + Long.toHexString(h[i]) + " (hex)");
print(" sqrt(" + PRIMES[i] + ") = " + sqrt +
" = " + new StringBuilder(binary(Double.doubleToRawLongBits(sqrt), 64))
.insert(0, '[')
.insert(2, "][")
.insert(15, "][")
.insert(49, '|')
.insert(70, ']'));
print(" H" + i + " = " + h + " (binary)");
print(" H" + i + "'= " + h_ + " (binary)");
print(" H" + i + (eq ? " == " : " != ") + "H" + i + "' -> " + (eq ? " OK " : " SHIFTED! "));
print(" -------------------");
}
}
/**
* Returns the first k bits of the mantissa (fraction field) of d.
* <p>
* Example: calling with d = 1.7320508075688772 and k = 32:
* d = [0][01111111111][10111011011001111010111010000101|10000100110010101010]
* returns
* [10111011011001111010111010000101]
*
* @param d double number
* @param k number of bits
* @return the first k bits of the fraction field of d
*/
private static long firstFractionalParts(double d, int k) {
// the top 12 bits (sign + exponent) are not part of the fraction field
// Example: given d = [0][01111111111][10111011011001111010111010000101][10000100110010101010] and k = 32
// first, rotate the bits to the right so that the k bits we need end up in the lowest k positions
// we get [10000100110010101010][0][01111111111][10111011011001111010111010000101]
// then mask off everything else and return
// [00000000000000000000][0][00000000000][10111011011001111010111010000101]
return cut(rotate(Double.doubleToRawLongBits(d), Double.SIZE - 12 - k), k);
}
/**
* Clears (sets to 0) every bit of b except the rightmost k bits.
* <p>
* Example: calling with b = 10111011001 = 1497 and k = 4
* returns 00000001001 (only the 4 rightmost bits remain)
*
* @param b bits
* @param k number of bits to keep
* @return the masked bits
*/
private static long cut(long b, int k) {
return b & (-1L >>> (Long.SIZE - k));
}
/**
* Rotates bits right by k positions within 64 bits.
* Example: calling with bits = 0000000000000000000000000000000000000000000000000000010111011001 = 1497 and k = 3
* returns 0010000000000000000000000000000000000000000000000000000010111011 (rotated right by 3 positions)
*
* @param bits bits to rotate
* @param k positions
* @return rotated value
*/
private static long rotate(long bits, int k) {
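// note: for the shift amounts used here this is equivalent to the JDK's Long.rotateRight(bits, k)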
return ((bits >>> k) | (bits << (Long.SIZE - k)));
}
private static String binary(long b, int k) {
return String.format("%" + k + "s", Long.toBinaryString(b & (-1L >>> (Long.SIZE - k)))).replace(' ', '0');
}
private static void initPrimes() {
int i = 0;
long k = 0;
while (i < 64) {
if (isPrime(k)) {
PRIMES[i] = k;
i++;
}
k++;
}
print("PRIMES : " + Arrays.toString(PRIMES));
}
private static boolean isPrime(long a) {
if (a < 2) return false;
if (a == 2) return true;
if (a % 2 == 0) return false;
for (long i = 3; i * i <= a; i += 2)
if (a % i == 0) return false;
return true;
}
private static void print(Object o) {
System.out.println(o);
}
}
The constants are the first 32 bits of the fractional part of the square root itself, not the top 32 bits of the IEEE 754 mantissa. The two only coincide when the square root lies in [1, 2): there the exponent is 0 and the stored mantissa is exactly the fractional part. For the larger primes the square root is 2 or more, so the double is normalized to 1.f × 2^e with e ≥ 1, and the mantissa bits end up offset by e positions relative to the true fractional part, which is exactly the "shift" you observed. Take the fractional part of the value directly instead:
√2 = 1.4142135623730951 → fractional part 0.4142135623730951 // the leading 1 is dropped
H0 = floor(0.4142135623730951 * 2^32)
= 1779033703
= 0x6A09E667 // hex(1779033703)
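Here is a minimal sketch of that computation in Java (my own illustration, separate from the Test class above): subtract the integer part and scale by 2^32, so the result never depends on where the double's exponent places the mantissa bits.
public class InitialHashConstants {
    public static void main(String[] args) {
        long[] primes = {2, 3, 5, 7, 11, 13, 17, 19};
        for (long p : primes) {
            double sqrt = Math.sqrt(p);
            double frac = sqrt - Math.floor(sqrt);  // drop the integer part
            long word = (long) (frac * 0x1p32);     // keep the first 32 bits of the fractional part
            System.out.printf("%08x%n", word);      // 6a09e667, bb67ae85, 3c6ef372, a54ff53a, ...
        }
    }
}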
Related
How to convert a hexadecimal value to binary.
Here is my code:
String authSign() {
if (CheckDate % 2 == 0) {
return Signature = H + B + Sk;
} else {
return Signature = B + H + Sk;
}
}
var key = utf8.encode(authSign());
var stringSha = sha256.convert(key);
var stringHex = hex.decode(stringSha.toString());
var finalHex = hex.encode(stringHex.toList());
and here is the result:
flutter: dad85aac19b632a71b4759078bf90cbe4fba354582454f0445bc3bb8e3e4c587
and the result I want is:
64616438356161633139623633326137316234373539303738626639306362653466626133353435383234353466303434356263336262386533653463353837
Just use BigInt.parse or BigInt.tryParse and set its radix parameter to 16:
String hex = "dad85aac19b632a71b4759078bf90cbe4fba354582454f0445bc3bb8e3e4c587";
BigInt bin = BigInt.parse(hex,radix: 16);
I'm dealing with BigDecimal in Java and I need to perform two checks against BigDecimal fields in my DTO:
Number of digits of the integer part (before the point) < 15
Total number of digits < 32, including the scale (zeros after the point)
What is the best way to implement this? I really don't want to use toBigInteger().toString() and .toString().
I think this will work.
BigDecimal d = new BigDecimal("921229392299229.2922929292920000");
int fractionCount = d.scale();
System.out.println(fractionCount);
int wholeCount = (int) (Math.ceil(Math.log10(d.longValue())));
System.out.println(wholeCount);
I did some testing of the above method vs. using indexOf and subtracting lengths of strings. The above seems to be significantly faster, if my testing methodology is reasonable. Here is how I tested it:
Random r = new Random(29);
int nRuns = 1_000_000;
// create a list of 1 million BigDecimals
List<BigDecimal> testData = new ArrayList<>();
for (int j = 0; j < nRuns; j++) {
String wholePart = r.ints(r.nextInt(15) + 1, 0, 10).mapToObj(
String::valueOf).collect(Collectors.joining());
String fractionalPart = r.ints(r.nextInt(31) + 1, 0, 10).mapToObj(
String::valueOf).collect(Collectors.joining());
BigDecimal d = new BigDecimal(wholePart + "." + fractionalPart);
testData.add(d);
}
long start = System.nanoTime();
// Using math
for (BigDecimal d : testData) {
int fractionCount = d.scale();
int wholeCount = (int) (Math.ceil(Math.log10(d.longValue())));
}
long time = System.nanoTime() - start;
System.out.println(time / 1_000_000.);
start = System.nanoTime();
//Using strings
for (BigDecimal d : testData) {
String sd = d.toPlainString();
int n = sd.indexOf(".");
int m = sd.length() - n - 1;
}
time = System.nanoTime() - start;
System.out.println(time / 1_000_000.);
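A possible alternative, as a sketch of my own rather than part of the answer above: BigDecimal already exposes precision() (the number of significant digits of the unscaled value) and scale() (the number of digits after the decimal point), so both checks can be expressed without converting to String or long, and without the longValue() truncation for very large integer parts.
import java.math.BigDecimal;

public class DigitCheck {
    // Both checks expressed with precision() and scale() only.
    static boolean isValid(BigDecimal d) {
        int fractionDigits = Math.max(d.scale(), 0);    // digits after the point (0 if the scale is negative)
        int integerDigits = d.precision() - d.scale();  // digits before the point
        if (integerDigits < 1) integerDigits = 1;       // values like 0.00123 still have one integer digit
        return integerDigits < 15 && integerDigits + fractionDigits < 32;
    }

    public static void main(String[] args) {
        System.out.println(isValid(new BigDecimal("921229392299229.2922929292920000"))); // false (15 integer digits)
        System.out.println(isValid(new BigDecimal("12345.6780")));                       // true
    }
}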
Hi, I have been wondering whether there is a way to convert binary numbers into decimal fractions.
I know how to change base, for example with this code:
String binary = "11110010";
//I'd like to change this line so it produces a decimal value
String denary = int.parse(binary, radix: 2).toRadixString(10);
If anyone is still wondering how to convert decimal to binary and the inverse:
print(55.toRadixString(2)); // Outputs 110111
print(int.parse("110111", radix: 2)); Outputs 55
#include <iostream>
using namespace std;

int binaryToDecimal(int n)
{
int num = n;
int dec_value = 0;
// Initializing base value to 1, i.e 2^0
int base = 1;
int temp = num;
while (temp) {
int last_digit = temp % 10;
temp = temp / 10;
dec_value += last_digit * base;
base = base * 2;
}
return dec_value;
}
int main()
{
int num = 10101001;
cout << binaryToDecimal(num) << endl;
}
This is my C++ solution, but you can implement it in any language.
Google implemented a captcha to block people from accessing the TTS translate API https://translate.google.com/translate_tts?ie=UTF-8&q=test&tl=zh-TW. I was using it in my mobile application. Now, it is not returning anything. How do I get around the captcha?
Add the qualifier '&client=tw-ob' to the end of your query.
https://translate.google.com/translate_tts?ie=UTF-8&q=test&tl=zh-TW&client=tw-ob
This answer no longer works consistently. Your IP address will be blocked by Google temporarily if you abuse this too much.
There are three main issues:
you must include "client" in your query string (client=t seems to work).
(in case you are trying to retrieve it using AJAX) the Referer of the HTTP request must be https://translate.google.com/
"tk" field changes for every query, and it must be populated with a matching hash:
tk = hash(q, TKK), where q is the text to be TTSed and TKK is a variable in the global scope when you load translate.google.com (type 'window.TKK' in the console). See the hash function at the bottom of this reply (calcHash).
to summarize:
function generateGoogleTTSLink(q, tl, tkk) {
var tk = calcHash(q, tkk);
return `https://translate.google.com/translate_tts?ie=UTF-8&total=1&idx=0&client=t&ttsspeed=1&tl=${tl}&tk=${tk}&q=${q}&textlen=${q.length}`;
}
generateGoogleTTSLink('ciao', 'it', '410353.1336369826');
// see definition of "calcHash" in the bottom of this comment.
=> To get your hands on a TKK, you can open the Google Translate website, then type "TKK" in the developer tools console (e.g. "410353.1336369826").
NOTE that TKK value changes every hour, and so, old TKKs might get blocked at some point, and refreshing it may be necessary (although so far it seems like old keys can work for a LONG time).
if you DO wish to periodically refresh TKK, it can be automated pretty easily, but not if you're running your code from the browser.
you can find a full NodeJS implementation here:
https://github.com/guyrotem/google-translate-server.
it exposes a minimal TTS API (query, language), and is deployed to a free Heroku server, so you can test it online if you like.
function shiftLeftOrRightThenSumOrXor(num, opArray) {
return opArray.reduce((acc, opString) => {
var op1 = opString[1]; // '+' | '-' ~ SRL | SLL
var op2 = opString[0]; // '+' | '^' ~ SUM | XOR
var xd = opString[2]; // [0-9a-f]
var shiftAmount = hexCharAsNumber(xd);
var mask = (op1 == '+') ? acc >>> shiftAmount : acc << shiftAmount;
return (op2 == '+') ? (acc + mask & 0xffffffff) : (acc ^ mask);
}, num);
}
function hexCharAsNumber(xd) {
return (xd >= 'a') ? xd.charCodeAt(0) - 87 : Number(xd);
}
function transformQuery(query) {
for (var e = [], f = 0, g = 0; g < query.length; g++) {
var l = query.charCodeAt(g);
if (l < 128) {
e[f++] = l; // 0{l[6-0]}
} else if (l < 2048) {
e[f++] = l >> 6 | 0xC0; // 110{l[10-6]}
e[f++] = l & 0x3F | 0x80; // 10{l[5-0]}
} else if (0xD800 == (l & 0xFC00) && g + 1 < query.length && 0xDC00 == (query.charCodeAt(g + 1) & 0xFC00)) {
// that's pretty rare... (avoid ovf?)
l = (1 << 16) + ((l & 0x03FF) << 10) + (query.charCodeAt(++g) & 0x03FF);
e[f++] = l >> 18 | 0xF0; // 111100{l[9-8*]}
e[f++] = l >> 12 & 0x3F | 0x80; // 10{l[7*-2]}
e[f++] = l & 0x3F | 0x80; // 10{(l+1)[5-0]}
} else {
e[f++] = l >> 12 | 0xE0; // 1110{l[15-12]}
e[f++] = l >> 6 & 0x3F | 0x80; // 10{l[11-6]}
e[f++] = l & 0x3F | 0x80; // 10{l[5-0]}
}
}
return e;
}
function normalizeHash(encondindRound2) {
if (encondindRound2 < 0) {
encondindRound2 = (encondindRound2 & 0x7fffffff) + 0x80000000;
}
return encondindRound2 % 1E6;
}
function calcHash(query, windowTkk) {
// STEP 1: spread the query char codes on a byte-array, 1-3 bytes per char
var bytesArray = transformQuery(query);
// STEP 2: starting with TKK index, add the array from last step one-by-one, and do 2 rounds of shift+add/xor
var d = windowTkk.split('.');
var tkkIndex = Number(d[0]) || 0;
var tkkKey = Number(d[1]) || 0;
var encondingRound1 = bytesArray.reduce((acc, current) => {
acc += current;
return shiftLeftOrRightThenSumOrXor(acc, ['+-a', '^+6'])
}, tkkIndex);
// STEP 3: apply 3 rounds of shift+add/xor and XOR with the TKK key
var encondingRound2 = shiftLeftOrRightThenSumOrXor(encondingRound1, ['+-3', '^+b', '+-f']) ^ tkkKey;
// STEP 4: Normalize to 2s complement & format
var normalizedResult = normalizeHash(encondingRound2);
return normalizedResult.toString() + "." + (normalizedResult ^ tkkIndex)
}
// usage example:
var tk = calcHash('hola', '409837.2120040981');
console.log('tk=' + tk);
// OUTPUT: 'tk=70528.480109'
You can also try this format:
pass q = your text in URL-encoded form
(in JavaScript you can use the encodeURI() function; PHP has the rawurlencode() function)
pass tl = the language's short code (e.g. Bangla = bn)
Now try this:
https://translate.google.com.vn/translate_tts?ie=UTF-8&q=%E0%A6%A2%E0%A6%BE%E0%A6%95%E0%A6%BE+&tl=bn&client=tw-ob
First, to avoid captcha, you have to set a proper user-agent like: "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:46.0) Gecko/20100101 Firefox/46.0"
Then, to avoid being blocked, you must provide a proper token (the "tk" GET parameter) with every single request.
On the web you can find many kinds of scripts that try to calculate the token after a lot of reverse engineering, but every time the big G changes the algorithm you're stuck again, so it's much easier to retrieve your token by simply observing similar requests to the translate page in depth (with your text in the URL).
You can read the token from time to time by grepping "tk=" from the output of this simple PhantomJS script:
"use strict";
var page = require('webpage').create();
var system = require('system');
var args = system.args;
if (args.length != 2) { console.log("usage: "+args[0]+" text"); phantom.exit(1); }
page.onConsoleMessage = function(msg) { console.log(msg); };
page.onResourceRequested = function(request) { console.log('Request ' + JSON.stringify(request, undefined, 4)); };
page.open("https://translate.google.it/?hl=it&tab=wT#fr/it/"+args[1], function(status) {
if (status === "success") { phantom.exit(0); }
else { phantom.exit(1); }
});
So in the end you can get your speech with something like:
wget -U "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:46.0) Gecko/20100101 Firefox/46.0"
"http://translate.google.com/translate_tts?ie=UTF-8&tl=it&tk=52269.458629&q=ciao&client=t" -O ciao.mp3
(tokens are probably time-based, so this link may not work tomorrow)
I rewrote Guy Rotem's answer in Java, so if you prefer Java over JavaScript, feel free to use it:
public class Hasher {
public long shiftLeftOrRightThenSumOrXor(long num, String[] opArray) {
long result = num;
int current = 0;
while (current < opArray.length) {
char op1 = opArray[current].charAt(1); // '+' | '-' ~ SRL | SLL
char op2 = opArray[current].charAt(0); // '+' | '^' ~ SUM | XOR
char xd = opArray[current].charAt(2); // [0-9a-f]
assertError(op1 == '+'
|| op1 == '-', "Invalid OP: " + op1);
assertError(op2 == '+'
|| op2 == '^', "Invalid OP: " + op2);
assertError(('0' <= xd && xd <= '9')
|| ('a' <= xd && xd <='f'), "Not an 0x? value: " + xd);
int shiftAmount = hexCharAsNumber(xd);
int mask = (op1 == '+') ? ((int) result) >>> shiftAmount : ((int) result) << shiftAmount;
long subresult = (op2 == '+') ? (((int) result) + ((int) mask) & 0xffffffff)
: (((int) result) ^ mask);
result = subresult;
current++;
}
return result;
}
public void assertError(boolean cond, String e) {
if (!cond) {
System.err.println(e);
}
}
public int hexCharAsNumber(char xd) {
return (xd >= 'a') ? xd - 87 : Character.getNumericValue(xd);
}
public int[] transformQuery(String query) {
int[] e = new int[1000];
int resultSize = 1000;
for (int f = 0, g = 0; g < query.length(); g++) {
int l = query.charAt(g);
if (l < 128) {
e[f++] = l; // 0{l[6-0]}
} else if (l < 2048) {
e[f++] = l >> 6 | 0xC0; // 110{l[10-6]}
e[f++] = l & 0x3F | 0x80; // 10{l[5-0]}
} else if (0xD800 == (l & 0xFC00) &&
g + 1 < query.length() && 0xDC00 == (query.charAt(g + 1) & 0xFC00)) {
// that's pretty rare... (avoid ovf?)
l = (1 << 16) + ((l & 0x03FF) << 10) + (query.charAt(++g) & 0x03FF);
e[f++] = l >> 18 | 0xF0; // 111100{l[9-8*]}
e[f++] = l >> 12 & 0x3F | 0x80; // 10{l[7*-2]}
e[f++] = l & 0x3F | 0x80; // 10{(l+1)[5-0]}
} else {
e[f++] = l >> 12 | 0xE0; // 1110{l[15-12]}
e[f++] = l >> 6 & 0x3F | 0x80; // 10{l[11-6]}
e[f++] = l & 0x3F | 0x80; // 10{l[5-0]}
}
resultSize = f;
}
return Arrays.copyOf(e, resultSize);
}
public long normalizeHash(long encondindRound2) {
if (encondindRound2 < 0) {
encondindRound2 = (encondindRound2 & 0x7fffffff) + 0x80000000L;
}
return (encondindRound2) % 1_000_000;
}
/*
/ EXAMPLE:
/
/ INPUT: query: 'hola', windowTkk: '409837.2120040981'
/ OUTPUT: '70528.480109'
/
*/
public String calcHash(String query, String windowTkk) {
// STEP 1: spread the query char codes on a byte-array, 1-3 bytes per char
int[] bytesArray = transformQuery(query);
// STEP 2: starting with TKK index,
// add the array from last step one-by-one, and do 2 rounds of shift+add/xor
String[] d = windowTkk.split("\\.");
int tkkIndex = 0;
try {
tkkIndex = Integer.valueOf(d[0]);
}
catch (Exception e) {
e.printStackTrace();
}
long tkkKey = 0;
try {
tkkKey = Long.valueOf(d[1]);
}
catch (Exception e) {
e.printStackTrace();
}
int current = 0;
long result = tkkIndex;
while (current < bytesArray.length) {
result += bytesArray[current];
long subresult = shiftLeftOrRightThenSumOrXor(result,
new String[] {"+-a", "^+6"});
result = subresult;
current++;
}
long encondingRound1 = result;
//System.out.println("encodingRound1: " + encondingRound1);
// STEP 3: apply 3 rounds of shift+add/xor and XOR with the TKK key
long encondingRound2 = ((int) shiftLeftOrRightThenSumOrXor(encondingRound1,
new String[] {"+-3", "^+b", "+-f"})) ^ ((int) tkkKey);
//System.out.println("encodingRound2: " + encondingRound2);
// STEP 4: Normalize to 2s complement & format
long normalizedResult = normalizeHash(encondingRound2);
//System.out.println("normalizedResult: " + normalizedResult);
return String.valueOf(normalizedResult) + "."
+ (((int) normalizedResult) ^ (tkkIndex));
}
}
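A short usage sketch for the Hasher class above, mirroring the JavaScript example (per the comment block in calcHash, the expected output for these inputs is 70528.480109):
Hasher hasher = new Hasher();
String tk = hasher.calcHash("hola", "409837.2120040981");
System.out.println("tk=" + tk); // expected: tk=70528.480109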
A project I am working on uses Apache Shiro as a security framework. Passwords are SHA1 hashed (no salt, no iterations). Login is SSL secured. However, the remaining part of the application is not SSL secured. In this context (no SSL) there should be a form where a user can change the password.
Since it wouldn't be a good idea to transmit it in plain text, it should be hashed on the client and then transmitted to the server. As the client is GWT (2.3) based, I am trying the library http://code.google.com/p/gwt-crypto, which uses code from Bouncy Castle.
However, in many cases (not all) the hashes generated by both frameworks differ in 1-4(?) characters.
For instance "happa3" is hashed to
"fe7f3cffd8a5f0512a5f1120f1369f48cd6f47c2"
by both implementations, whereas just "happa" is hashed to
"fb3c3a741b4e07a87d9cb68f3db020d6fbfed00a"
by the Shiro implementation and to
"fb3c3a741b4e07a87d9cb63f3db020d6fbfed00a"
by the gwt-crypto implementation (23rd character differs).
I wonder whether there is a "correct"/standard SHA1 hashing and whether there is a bug in one of the libraries or maybe my usage of them is flawed.
One of my first thoughts was related to different encodings or strange conversions due to different transport mechanisms (RPC vs. Post). To my knowledge though (and what puzzles me most), SHA1 hashes should differ completely with a high probability if there is just a difference of a single bit. So different encodings shouldn't be the issue here.
I am using this code on the client (GWT) for hashing:
String hashed = toHex(createSHA1Hash("password"));
...
private String createSHA1Hash(String passwordString){
SHA1Digest sha1 = new SHA1Digest();
byte[] bytes;
byte[] result = new byte[sha1.getDigestSize()];
try {
bytes = passwordString.getBytes();
sha1.update(bytes, 0, bytes.length);
int val = sha1.doFinal(result, 0);
} catch (UnsupportedEncodingException e) {}
return new String(result);
}
public String toHex(String arg) {
return new BigInteger(1, arg.getBytes()).toString(16);
}
And this on the server (Shiro):
String hashed = new Sha1Hash("password").toHex()
which, as far as I can see, does something very similar behind the scenes (I had a quick look at the source code).
Did I miss something obvious here?
EDIT: Seems like the GWT code does not run natively for some reason (i.e. just in development mode) and silently fails (it does compile, though). Have to find out why...
Edit(2): "int val = sha1.doFinal(result, 0);" is the line that makes trouble, i.e. if present, the whole code does not run natively (JS) but only in dev-mode (with wrong results)
You could test this version:
public class SHA1 {
public static native String calcSHA1(String s) /*-{
//
// A JavaScript implementation of the Secure Hash Algorithm, SHA-1, as defined
// in FIPS 180-1
// Version 2.2 Copyright Paul Johnston 2000 - 2009.
// Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
// Distributed under the BSD License
// See http://pajhome.org.uk/crypt/md5 for details.
//
//
// Configurable variables. You may need to tweak these to be compatible with
// the server-side, but the defaults work in most cases.
//
var hexcase = 0; // hex output format. 0 - lowercase; 1 - uppercase
var b64pad = ""; // base-64 pad character. "=" for strict RFC compliance
//
// These are the functions you'll usually want to call
// They take string arguments and return either hex or base-64 encoded strings
//
function b64_sha1(s) { return rstr2b64(rstr_sha1(str2rstr_utf8(s))); }
function any_sha1(s, e) { return rstr2any(rstr_sha1(str2rstr_utf8(s)), e); }
function hex_hmac_sha1(k, d)
{ return rstr2hex(rstr_hmac_sha1(str2rstr_utf8(k), str2rstr_utf8(d))); }
function b64_hmac_sha1(k, d)
{ return rstr2b64(rstr_hmac_sha1(str2rstr_utf8(k), str2rstr_utf8(d))); }
function any_hmac_sha1(k, d, e)
{ return rstr2any(rstr_hmac_sha1(str2rstr_utf8(k), str2rstr_utf8(d)), e); }
//
// Perform a simple self-test to see if the VM is working
//
function sha1_vm_test()
{
return hex_sha1("abc").toLowerCase() == "a9993e364706816aba3e25717850c26c9cd0d89d";
}
//
// Calculate the SHA1 of a raw string
//
function rstr_sha1(s)
{
return binb2rstr(binb_sha1(rstr2binb(s), s.length * 8));
}
//
// Calculate the HMAC-SHA1 of a key and some data (raw strings)
//
function rstr_hmac_sha1(key, data)
{
var bkey = rstr2binb(key);
if(bkey.length > 16) bkey = binb_sha1(bkey, key.length * 8);
var ipad = Array(16), opad = Array(16);
for(var i = 0; i < 16; i++)
{
ipad[i] = bkey[i] ^ 0x36363636;
opad[i] = bkey[i] ^ 0x5C5C5C5C;
}
var hash = binb_sha1(ipad.concat(rstr2binb(data)), 512 + data.length * 8);
return binb2rstr(binb_sha1(opad.concat(hash), 512 + 160));
}
//
// Convert a raw string to a hex string
//
function rstr2hex(input)
{
try { hexcase } catch(e) { hexcase=0; }
var hex_tab = hexcase ? "0123456789ABCDEF" : "0123456789abcdef";
var output = "";
var x;
for(var i = 0; i < input.length; i++)
{
x = input.charCodeAt(i);
output += hex_tab.charAt((x >>> 4) & 0x0F)
+ hex_tab.charAt( x & 0x0F);
}
return output;
}
//
// Convert a raw string to a base-64 string
//
function rstr2b64(input)
{
try { b64pad } catch(e) { b64pad=''; }
var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
var output = "";
var len = input.length;
for(var i = 0; i < len; i += 3)
{
var triplet = (input.charCodeAt(i) << 16)
| (i + 1 < len ? input.charCodeAt(i+1) << 8 : 0)
| (i + 2 < len ? input.charCodeAt(i+2) : 0);
for(var j = 0; j < 4; j++)
{
if(i * 8 + j * 6 > input.length * 8) output += b64pad;
else output += tab.charAt((triplet >>> 6*(3-j)) & 0x3F);
}
}
return output;
}
//
// Convert a raw string to an arbitrary string encoding
//
function rstr2any(input, encoding)
{
var divisor = encoding.length;
var remainders = Array();
var i, q, x, quotient;
// Convert to an array of 16-bit big-endian values, forming the dividend
var dividend = Array(Math.ceil(input.length / 2));
for(i = 0; i < dividend.length; i++)
{
dividend[i] = (input.charCodeAt(i * 2) << 8) | input.charCodeAt(i * 2 + 1);
}
//
// Repeatedly perform a long division. The binary array forms the dividend,
// the length of the encoding is the divisor. Once computed, the quotient
// forms the dividend for the next step. We stop when the dividend is zero.
// All remainders are stored for later use.
//
while(dividend.length > 0)
{
quotient = Array();
x = 0;
for(i = 0; i < dividend.length; i++)
{
x = (x << 16) + dividend[i];
q = Math.floor(x / divisor);
x -= q * divisor;
if(quotient.length > 0 || q > 0)
quotient[quotient.length] = q;
}
remainders[remainders.length] = x;
dividend = quotient;
}
// Convert the remainders to the output string
var output = "";
for(i = remainders.length - 1; i >= 0; i--)
output += encoding.charAt(remainders[i]);
// Append leading zero equivalents
var full_length = Math.ceil(input.length * 8 /
(Math.log(encoding.length) / Math.log(2)))
for(i = output.length; i < full_length; i++)
output = encoding[0] + output;
return output;
}
//
// Encode a string as utf-8.
// For efficiency, this assumes the input is valid utf-16.
//
function str2rstr_utf8(input)
{
var output = "";
var i = -1;
var x, y;
while(++i < input.length)
{
// Decode utf-16 surrogate pairs
x = input.charCodeAt(i);
y = i + 1 < input.length ? input.charCodeAt(i + 1) : 0;
if(0xD800 <= x && x <= 0xDBFF && 0xDC00 <= y && y <= 0xDFFF)
{
x = 0x10000 + ((x & 0x03FF) << 10) + (y & 0x03FF);
i++;
}
// Encode output as utf-8
if(x <= 0x7F)
output += String.fromCharCode(x);
else if(x <= 0x7FF)
output += String.fromCharCode(0xC0 | ((x >>> 6 ) & 0x1F),
0x80 | ( x & 0x3F));
else if(x <= 0xFFFF)
output += String.fromCharCode(0xE0 | ((x >>> 12) & 0x0F),
0x80 | ((x >>> 6 ) & 0x3F),
0x80 | ( x & 0x3F));
else if(x <= 0x1FFFFF)
output += String.fromCharCode(0xF0 | ((x >>> 18) & 0x07),
0x80 | ((x >>> 12) & 0x3F),
0x80 | ((x >>> 6 ) & 0x3F),
0x80 | ( x & 0x3F));
}
return output;
}
//
// Encode a string as utf-16
//
function str2rstr_utf16le(input)
{
var output = "";
for(var i = 0; i < input.length; i++)
output += String.fromCharCode( input.charCodeAt(i) & 0xFF,
(input.charCodeAt(i) >>> 8) & 0xFF);
return output;
}
function str2rstr_utf16be(input)
{
var output = "";
for(var i = 0; i < input.length; i++)
output += String.fromCharCode((input.charCodeAt(i) >>> 8) & 0xFF,
input.charCodeAt(i) & 0xFF);
return output;
}
//
// Convert a raw string to an array of big-endian words
// Characters >255 have their high-byte silently ignored.
//
function rstr2binb(input)
{
var output = Array(input.length >> 2);
for(var i = 0; i < output.length; i++)
output[i] = 0;
for(var i = 0; i < input.length * 8; i += 8)
output[i>>5] |= (input.charCodeAt(i / 8) & 0xFF) << (24 - i % 32);
return output;
}
//
// Convert an array of big-endian words to a string
//
function binb2rstr(input)
{
var output = "";
for(var i = 0; i < input.length * 32; i += 8)
output += String.fromCharCode((input[i>>5] >>> (24 - i % 32)) & 0xFF);
return output;
}
//
// Calculate the SHA-1 of an array of big-endian words, and a bit length
//
function binb_sha1(x, len)
{
// append padding
x[len >> 5] |= 0x80 << (24 - len % 32);
x[((len + 64 >> 9) << 4) + 15] = len;
var w = Array(80);
var a = 1732584193;
var b = -271733879;
var c = -1732584194;
var d = 271733878;
var e = -1009589776;
for(var i = 0; i < x.length; i += 16)
{
var olda = a;
var oldb = b;
var oldc = c;
var oldd = d;
var olde = e;
for(var j = 0; j < 80; j++)
{
if(j < 16) w[j] = x[i + j];
else w[j] = bit_rol(w[j-3] ^ w[j-8] ^ w[j-14] ^ w[j-16], 1);
var t = safe_add(safe_add(bit_rol(a, 5), sha1_ft(j, b, c, d)),
safe_add(safe_add(e, w[j]), sha1_kt(j)));
e = d;
d = c;
c = bit_rol(b, 30);
b = a;
a = t;
}
a = safe_add(a, olda);
b = safe_add(b, oldb);
c = safe_add(c, oldc);
d = safe_add(d, oldd);
e = safe_add(e, olde);
}
return Array(a, b, c, d, e);
}
//
// Perform the appropriate triplet combination function for the current
// iteration
//
function sha1_ft(t, b, c, d)
{
if(t < 20) return (b & c) | ((~b) & d);
if(t < 40) return b ^ c ^ d;
if(t < 60) return (b & c) | (b & d) | (c & d);
return b ^ c ^ d;
}
//
// Determine the appropriate additive constant for the current iteration
//
function sha1_kt(t)
{
return (t < 20) ? 1518500249 : (t < 40) ? 1859775393 :
(t < 60) ? -1894007588 : -899497514;
}
//
// Add integers, wrapping at 2^32. This uses 16-bit operations internally
// to work around bugs in some JS interpreters.
//
function safe_add(x, y)
{
var lsw = (x & 0xFFFF) + (y & 0xFFFF);
var msw = (x >> 16) + (y >> 16) + (lsw >> 16);
return (msw << 16) | (lsw & 0xFFFF);
}
//
// Bitwise rotate a 32-bit number to the left.
//
function bit_rol(num, cnt)
{
return (num << cnt) | (num >>> (32 - cnt));
}
return rstr2hex(rstr_sha1(str2rstr_utf8(s)));
}-*/;
}
I'm using it for my client-side SHA generation and it worked well.
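As an aside, a sketch of my own (not taken from either library): when in doubt which implementation is correct, the plain JDK MessageDigest can serve as a reference on the server side. It hashes the UTF-8 bytes and hex-encodes the raw digest bytes directly, avoiding both the String/charset round-trip and the leading zeros that BigInteger.toString(16) drops.
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class Sha1Reference {
    // Hash the UTF-8 bytes of the input and hex-encode the raw digest bytes.
    static String sha1Hex(String input) throws NoSuchAlgorithmException {
        MessageDigest md = MessageDigest.getInstance("SHA-1");
        byte[] digest = md.digest(input.getBytes(StandardCharsets.UTF_8));
        StringBuilder sb = new StringBuilder(digest.length * 2);
        for (byte b : digest) {
            sb.append(String.format("%02x", b & 0xff)); // two hex chars per byte, keeps leading zeros
        }
        return sb.toString();
    }

    public static void main(String[] args) throws NoSuchAlgorithmException {
        System.out.println(sha1Hex("happa"));
        System.out.println(sha1Hex("happa3"));
    }
}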