For the function:
unsigned int f(const String& S)
{
unsigned int i;
long unsigned int bigval = S.Element(0); // S[0] or '\0'
for (i = 1; i < S.Size(); ++i)
bigval = ((bigval & 65535) * 18000) // low16 * magic_number
+ (bigval >> 16) // high16
+ S[i];
bigval = ((bigval & 65535) * 18000) + (bigval >> 16);
// bigval = low16 * magic_number + high16
return bigval & 65535;
// return low16
}
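For checking results outside the class's String type, the same hash can be sketched against std::string (an assumption: size() standing in for Size(), operator[] for Element()):

#include <iostream>
#include <string>

// Same 16-bit mixing hash as above, rewritten against std::string (assumed equivalent).
unsigned int f(const std::string& S)
{
    unsigned long bigval = S.empty() ? '\0' : S[0];         // S[0] or '\0'
    for (std::size_t i = 1; i < S.size(); ++i)
        bigval = ((bigval & 65535) * 18000)                 // low16 * magic_number
               + (bigval >> 16)                             // high16
               + S[i];
    bigval = ((bigval & 65535) * 18000) + (bigval >> 16);   // low16 * magic_number + high16
    return bigval & 65535;                                  // return low16
}

int main()
{
    std::cout << f("a") << '\n';   // prints 42064 when size("a") == 1 (no '\0' counted)
    return 0;
}

If the class's Size() counts a terminating '\0', the loop runs one extra time and the result changes, so that detail is worth checking against the notes.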
According to my class notes, passing the string 'a' is supposed to return 42064, but I don't come up with that. Here is what I get:
unsigned int f(const String& S) // string passed is 'a'
{
unsigned int i;
long unsigned int bigval = S.Element(0); // S[0] = 97 ascii val for 'a'
for (i = 1; i < S.Size(); ++i) // only 1 iteration b/c only 1 char
bigval = ((bigval & 65535) * 18000)
// (97 & 65535) * 18000 = 1746000
+ (bigval >> 16)
// 1746000 + (1746000 >> 16) = 26
+ S[i];
// 26 + 0 = 26 because S[1] would be '\0'
bigval = ((bigval & 65535) * 18000) + (bigval >> 16);
//((26 & 65535) * 18000) + (26 >> 16) = 468000 + 0 = 468000
return bigval & 65535;
// 468000 & 65535 = 9248
}
What am I calculating wrong?
(edited: I forgot that i starts at 1, not 0, but I still don't get the correct answer...)
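One way to see where a hand trace and the code disagree is to print bigval after each stage; a minimal traced sketch (again assuming std::string, with the hypothetical name f_traced):

#include <iostream>
#include <string>

// Traced copy of f(): prints bigval at each stage so a hand calculation
// can be compared line by line. The name and the use of std::string are assumptions.
unsigned int f_traced(const std::string& S)
{
    unsigned long bigval = S.empty() ? '\0' : S[0];
    std::cout << "start:      bigval = " << bigval << '\n';
    for (std::size_t i = 1; i < S.size(); ++i)
    {
        bigval = ((bigval & 65535) * 18000) + (bigval >> 16) + S[i];
        std::cout << "after S[" << i << "]: bigval = " << bigval << '\n';
    }
    bigval = ((bigval & 65535) * 18000) + (bigval >> 16);
    std::cout << "final mix:  bigval = " << bigval << '\n';
    return bigval & 65535;
}

Note that within a single assignment every occurrence of bigval refers to the value it held before that statement, so the >> 16 in the loop body shifts the old bigval, not the freshly computed low16 * 18000 product.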
I think your bit shift is wrong. Try (bigval << 16).