I am trying to multiply two 32-bit numbers a and b, which should give a 64-bit result. With a and b being unsigned 32-bit integers, and ah, al, bh, bl their 16-bit high and low halves, I came up with this:
r = a * b
r = ((ah << 16) + al) * ((bh << 16) + bl)
= ((ah * 2^16) + al) * ((bh * 2^16) + bl)
= (ah * 2^16) * (bh * 2^16) + (ah * 2^16) * bl + al * (bh * 2^16) + al * bl
= (ah * bh * 2^32) + (ah * bl * 2^16) + (al * bh * 2^16) + (al * bl)
= ((ah * bh) << 32) + ((ah * bl) << 16) + ((al * bh) << 16) + (al * bl)
= ((ah * bh) << 32) + ((ah * bl + al * bh) << 16) + (al * bl)
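As a quick aside, the last line of that identity can be sanity-checked against the compiler's native 64-bit arithmetic. This is just a throwaway check, assuming unsigned long long is at least 64 bits wide; the variable names direct/split and the test values are only for illustration:

#include <stdio.h>

/* Throwaway check: compute a * b directly in 64 bits and via the
   16-bit decomposition; the two results must match. */
int main(void) {
    unsigned int a = 0xdeadbeef, b = 0xcafebabe;   /* arbitrary test values */
    unsigned long long ah = a >> 16, al = a & 0xffff,
                       bh = b >> 16, bl = b & 0xffff;
    unsigned long long direct = (unsigned long long) a * b;
    unsigned long long split  = ((ah * bh) << 32) + ((ah * bl + al * bh) << 16) + (al * bl);
    printf("direct = 0x%llx\nsplit  = 0x%llx\n", direct, split);   /* both lines show the same value */
    return 0;
}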
I then translated the decomposition to C as follows:
static void _mul64(unsigned int a, unsigned int b, unsigned int *hi, unsigned int *lo) {
    /* split a and b into 16-bit halves */
    unsigned int ah = (a >> 16), al = a & 0xffff,
                 bh = (b >> 16), bl = b & 0xffff,
                 /* the four 32-bit partial products and their halves */
                 rh = (ah * bh), rl = (al * bl),
                 rm1 = ah * bl, rm2 = al * bh,
                 rm1h = rm1 >> 16, rm2h = rm2 >> 16,
                 rm1l = rm1 & 0xffff, rm2l = rm2 & 0xffff,
                 rmh = rm1h + rm2h, rml = rm1l + rm2l;
    /* fold the middle terms into the low and high words */
    rl = rl + (rml << 16);
    rh = rh + rmh;
    /* if rml overflowed 16 bits, that bit is shifted out of the low word,
       so account for it in the high word */
    if (rml & 0xffff0000)
        rh = rh + 1;
    *lo = rl;
    *hi = rh;
}
However, when I run this little test, which multiplies a = 0xFFFFFFFF by b = 0xFFFFFFFF and should yield 0xFFFFFFFE00000001, I get 0xFFFFFFFD00000001 instead. What am I doing wrong?
#include <stdio.h>

int main(int argc, char **argv) {
    unsigned int a, b, rl, rh;
    unsigned long long r2, r3;

    a = 0xffffffff;
    b = 0xffffffff;

    _mul64(a, b, &rh, &rl);
    r3 = ((unsigned long long) rh << 32) + rl;
    r2 = (unsigned long long) a * b;   /* reference result from the compiler's 64-bit multiply */

    printf("a = 0x%08x, b = 0x%08x\n", a, b);
    printf("_mul64: 0x%16llx\n", r3);
    printf("a * b = 0x%16llx\n", r2);
    return 0;
}