Example #1
			/*
			--------------------------------------------------------------------
			This works on all machines, is identical to hash() on little-endian
			machines, and it is much faster than hash(), but it requires
			-- that the key be an array of uint64_t's (ub8's in the original), and
			-- that all your machines have the same endianness, and
			-- that the length be the number of uint64_t's in the key
			--------------------------------------------------------------------
			*/
			static inline uint64_t hash642(uint64_t const * k, uint64_t length, uint64_t level = 0x9e3779b97f4a7c13LL)
			{
				uint64_t a,b,c,len;

				/* Set up the internal state */
				len = length;
				a = b = level;                         /* the previous hash value */
				c = 0x9e3779b97f4a7c13LL; /* the golden ratio; an arbitrary value */

				/*---------------------------------------- handle most of the key */
				while (len >= 3)
				{
					a += k[0];
					b += k[1];
					c += k[2];
					mix64(a,b,c);
					k += 3; len -= 3;
				}

				/*-------------------------------------- handle the last 2 uint64_t's */
				/*-------------------------------------- handle the last 2 uint64_t's */
				c += (length<<3);
				switch(len)              /* all the case statements fall through */
				{
					/* c is reserved for the length */
					case  2: b+=k[1];
					case  1: a+=k[0];
					/* case 0: nothing left to add */
				}
				mix64(a,b,c);
				/*-------------------------------------------- report the result */
				return c;
			}
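All of the variants below call a mix64() step that none of the snippets reproduce. For reference, here is a sketch of the reversible three-word mixer as published in Bob Jenkins' lookup8.c (http://burtleburtle.net/bob/c/lookup8.c); examples 1-3 and 5 assume exactly this shape, and the snippets additionally need <stdint.h> or equivalent typedefs:

#include <stdint.h>

/* Jenkins' 64-bit mix: reversibly scrambles three state words so that
 * every input bit affects every output bit (shift constants as in lookup8.c). */
#define mix64(a,b,c) \
{ \
  a -= b; a -= c; a ^= (c>>43); \
  b -= c; b -= a; b ^= (a<<9);  \
  c -= a; c -= b; c ^= (b>>8);  \
  a -= b; a -= c; a ^= (c>>38); \
  b -= c; b -= a; b ^= (a<<23); \
  c -= a; c -= b; c ^= (b>>5);  \
  a -= b; a -= c; a ^= (c>>35); \
  b -= c; b -= a; b ^= (a<<49); \
  c -= a; c -= b; c ^= (b>>11); \
  a -= b; a -= c; a ^= (c>>12); \
  b -= c; b -= a; b ^= (a<<18); \
  c -= a; c -= b; c ^= (b>>22); \
}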
Example #2
			/*
			--------------------------------------------------------------------
			hash() -- hash a variable-length key into a 64-bit value
			k     : the key (the unaligned variable-length array of bytes)
			len   : the length of the key, counting by bytes
			level : can be any 8-byte value
			Returns a 64-bit value.  Every bit of the key affects every bit of
			the return value.  No funnels.  Every 1-bit and 2-bit delta achieves
			avalanche.  About 41+5len instructions.

			The best hash table sizes are powers of 2.  There is no need to do
			mod a prime (mod is sooo slow!).  If you need less than 64 bits,
			use a bitmask.  For example, if you need only 10 bits, do
			h = (h & hashmask(10));
			In which case, the hash table should have hashsize(10) elements.

			If you are hashing n strings (ub1 **)k, do it like this:
			for (i=0, h=0; i<n; ++i) h = hash( k[i], len[i], h);

			By Bob Jenkins, Jan 4 1997.  [email protected].  You may
			use this code any way you wish, private, educational, or commercial,
			but I would appreciate if you give me credit.

			See http://burtleburtle.net/bob/hash/evahash.html
			Use for hash table lookup, or anything where one collision in 2^^64
			is acceptable.  Do NOT use for cryptographic purposes.
			--------------------------------------------------------------------
			*/
			inline static uint64_t hash64(uint8_t const * k, uint64_t length, uint64_t level = 0x9e3779b97f4a7c13LL)
			{
				uint64_t a,b,c,len;

				/* Set up the internal state */
				len = length;
				a = b = level;                         /* the previous hash value */
				c = 0x9e3779b97f4a7c13LL; /* the golden ratio; an arbitrary value */

				/*---------------------------------------- handle most of the key */
				while (len >= 24)
				{
					a += (k[0]        +(static_cast<uint64_t>(k[ 1])<< 8)+(static_cast<uint64_t>(k[ 2])<<16)+(static_cast<uint64_t>(k[ 3])<<24)
						+(static_cast<uint64_t>(k[4 ])<<32)+(static_cast<uint64_t>(k[ 5])<<40)+(static_cast<uint64_t>(k[ 6])<<48)+(static_cast<uint64_t>(k[ 7])<<56));
					b += (k[8]        +(static_cast<uint64_t>(k[ 9])<< 8)+(static_cast<uint64_t>(k[10])<<16)+(static_cast<uint64_t>(k[11])<<24)
						+(static_cast<uint64_t>(k[12])<<32)+(static_cast<uint64_t>(k[13])<<40)+(static_cast<uint64_t>(k[14])<<48)+(static_cast<uint64_t>(k[15])<<56));
					c += (k[16]       +(static_cast<uint64_t>(k[17])<< 8)+(static_cast<uint64_t>(k[18])<<16)+(static_cast<uint64_t>(k[19])<<24)
						+(static_cast<uint64_t>(k[20])<<32)+(static_cast<uint64_t>(k[21])<<40)+(static_cast<uint64_t>(k[22])<<48)+(static_cast<uint64_t>(k[23])<<56));
					mix64(a,b,c);
					k += 24; len -= 24;
				}

				/*------------------------------------- handle the last 23 bytes */
				c += length;
				switch(len)              /* all the case statements fall through */
				{
					case 23: c+=(static_cast<uint64_t>(k[22])<<56);
					case 22: c+=(static_cast<uint64_t>(k[21])<<48);
					case 21: c+=(static_cast<uint64_t>(k[20])<<40);
					case 20: c+=(static_cast<uint64_t>(k[19])<<32);
					case 19: c+=(static_cast<uint64_t>(k[18])<<24);
					case 18: c+=(static_cast<uint64_t>(k[17])<<16);
					case 17: c+=(static_cast<uint64_t>(k[16])<<8);
					/* the first byte of c is reserved for the length */
					case 16: b+=(static_cast<uint64_t>(k[15])<<56);
					case 15: b+=(static_cast<uint64_t>(k[14])<<48);
					case 14: b+=(static_cast<uint64_t>(k[13])<<40);
					case 13: b+=(static_cast<uint64_t>(k[12])<<32);
					case 12: b+=(static_cast<uint64_t>(k[11])<<24);
					case 11: b+=(static_cast<uint64_t>(k[10])<<16);
					case 10: b+=(static_cast<uint64_t>(k[ 9])<<8);
					case  9: b+=(static_cast<uint64_t>(k[ 8]));
					case  8: a+=(static_cast<uint64_t>(k[ 7])<<56);
					case  7: a+=(static_cast<uint64_t>(k[ 6])<<48);
					case  6: a+=(static_cast<uint64_t>(k[ 5])<<40);
					case  5: a+=(static_cast<uint64_t>(k[ 4])<<32);
					case  4: a+=(static_cast<uint64_t>(k[ 3])<<24);
					case  3: a+=(static_cast<uint64_t>(k[ 2])<<16);
					case  2: a+=(static_cast<uint64_t>(k[ 1])<<8);
					case  1: a+=(static_cast<uint64_t>(k[ 0]));
					/* case 0: nothing left to add */
				}

				mix64(a,b,c);

				/*-------------------------------------------- report the result */
				return c;
			}
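The hashsize()/hashmask() helpers mentioned in the header comment come from the same lookup8.c source; below is a minimal sketch together with the chained-hashing idiom the comment describes (hash_strings, keys, and lens are hypothetical names for illustration):

#include <stdint.h>

#define hashsize(n) ((uint64_t)1 << (n))
#define hashmask(n) (hashsize(n) - 1)

/* Chain-hash n byte strings by feeding each result back in as the level,
 * then mask down to a 10-bit index for a table of hashsize(10) buckets. */
static uint64_t hash_strings(uint8_t const **keys, uint64_t const *lens, int n)
{
	uint64_t h = 0;
	for (int i = 0; i < n; ++i)
		h = hash64(keys[i], lens[i], h);
	return h & hashmask(10);
}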
Example #3
U64 hash64( register const U8 *k, register U32 length, register U64 initval )
{
  register U64 a,b,c,len;

  /* Set up the internal state */
  len = length;
  a = b = initval;                         /* the previous hash value */
  c = 0x9e3779b97f4a7c13LL; /* the golden ratio; an arbitrary value */

  /*---------------------------------------- handle most of the key */
  while (len >= 24)
  {
    a += (k[0]        +((U64)k[ 1]<< 8)+((U64)k[ 2]<<16)+((U64)k[ 3]<<24)
     +((U64)k[4 ]<<32)+((U64)k[ 5]<<40)+((U64)k[ 6]<<48)+((U64)k[ 7]<<56));
    b += (k[8]        +((U64)k[ 9]<< 8)+((U64)k[10]<<16)+((U64)k[11]<<24)
     +((U64)k[12]<<32)+((U64)k[13]<<40)+((U64)k[14]<<48)+((U64)k[15]<<56));
    c += (k[16]       +((U64)k[17]<< 8)+((U64)k[18]<<16)+((U64)k[19]<<24)
     +((U64)k[20]<<32)+((U64)k[21]<<40)+((U64)k[22]<<48)+((U64)k[23]<<56));
    mix64(a,b,c);
    k += 24; len -= 24;
  }

  /*------------------------------------- handle the last 23 bytes */
  c += length;
  switch(len)              /* all the case statements fall through */
  {
  case 23: c+=((U64)k[22]<<56);
  case 22: c+=((U64)k[21]<<48);
  case 21: c+=((U64)k[20]<<40);
  case 20: c+=((U64)k[19]<<32);
  case 19: c+=((U64)k[18]<<24);
  case 18: c+=((U64)k[17]<<16);
  case 17: c+=((U64)k[16]<<8);
    /* the first byte of c is reserved for the length */
  case 16: b+=((U64)k[15]<<56);
  case 15: b+=((U64)k[14]<<48);
  case 14: b+=((U64)k[13]<<40);
  case 13: b+=((U64)k[12]<<32);
  case 12: b+=((U64)k[11]<<24);
  case 11: b+=((U64)k[10]<<16);
  case 10: b+=((U64)k[ 9]<<8);
  case  9: b+=((U64)k[ 8]);
  case  8: a+=((U64)k[ 7]<<56);
  case  7: a+=((U64)k[ 6]<<48);
  case  6: a+=((U64)k[ 5]<<40);
  case  5: a+=((U64)k[ 4]<<32);
  case  4: a+=((U64)k[ 3]<<24);
  case  3: a+=((U64)k[ 2]<<16);
  case  2: a+=((U64)k[ 1]<<8);
  case  1: a+=((U64)k[ 0]);
    /* case 0: nothing left to add */
  }
  mix64(a,b,c);
  /*-------------------------------------------- report the result */
  return c;
}
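This C variant leans on U8/U32/U64 typedefs that the snippet does not carry along; a minimal assumption using <stdint.h> (Jenkins' originals were named ub1/ub4/ub8):

#include <stdint.h>

typedef uint8_t  U8;   /* ub1 in the original lookup8.c naming */
typedef uint32_t U32;  /* ub4 in the original lookup8.c naming */
typedef uint64_t U64;  /* ub8 in the original lookup8.c naming */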
Example #4
static __inline uint64_t final_weak_avalanche(uint64_t a, uint64_t b) {
  /* LY: for performance reasons on some lower-end CPUs
   * I replaced the second mux64() operation with mix64().
   * Unfortunately this approach fails the "strict avalanche criteria";
   * see test results at https://github.com/demerphq/smhasher. */
  return mux64(rot64(a + b, 17), prime_4) + mix64(a ^ b, prime_0);
}
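rot64(), mux64(), mix64(), and the prime_* constants are t1ha internals not shown in the snippet; note that this mix64() is t1ha's two-argument multiplicative mixer, not the three-word Jenkins macro used by the other examples. A rough sketch of the helpers' shape (the XOR-folded 128-bit product matches t1ha's description, but the exact definitions and the rotation constant below are assumptions; unsigned __int128 is GCC/Clang-specific):

#include <stdint.h>

static __inline uint64_t rot64(uint64_t v, unsigned s) {
  return (v >> s) | (v << (64 - s)); /* rotate right; caller keeps 0 < s < 64 */
}

static __inline uint64_t mux64(uint64_t v, uint64_t prime) {
  /* widening 64x64 -> 128-bit multiply, then XOR-fold the two halves */
  unsigned __int128 r = (unsigned __int128)v * prime;
  return (uint64_t)r ^ (uint64_t)(r >> 64);
}

static __inline uint64_t mix64(uint64_t v, uint64_t p) {
  v *= p;                  /* multiplicative mixing ... */
  return v ^ rot64(v, 41); /* ... then rotate-XOR (constant is an assumption) */
}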
Example #5
static udmhash64_t hash64( register const unsigned char *k, register size_t length, udmhash64_t level)
/* k      : the key (unsigned bytes; a plain char pointer would sign-extend
 *          high-bit bytes in the casts below and corrupt the hash) */
/* length : the length of the key, in bytes */
/* level  : the previous hash, or an arbitrary value */
{
  register udmhash64_t a, b, c;
  register size_t len;

  /* Set up the internal state */
  len = length;
  a = b = level;                         /* the previous hash value */
#ifndef WIN32
  c = 0x9e3779b97f4a7c13LL; /* the golden ratio; an arbitrary value */
#else
  c = 0x9e3779b97f4a7c13; /* the golden ratio; an arbitrary value */
#endif

  /*---------------------------------------- handle most of the key */
  while (len >= 24)
  {
    a += (k[0]        +((udmhash64_t)k[ 1]<< 8)+((udmhash64_t)k[ 2]<<16)+((udmhash64_t)k[ 3]<<24)
     +((udmhash64_t)k[4 ]<<32)+((udmhash64_t)k[ 5]<<40)+((udmhash64_t)k[ 6]<<48)+((udmhash64_t)k[ 7]<<56));
    b += (k[8]        +((udmhash64_t)k[ 9]<< 8)+((udmhash64_t)k[10]<<16)+((udmhash64_t)k[11]<<24)
     +((udmhash64_t)k[12]<<32)+((udmhash64_t)k[13]<<40)+((udmhash64_t)k[14]<<48)+((udmhash64_t)k[15]<<56));
    c += (k[16]       +((udmhash64_t)k[17]<< 8)+((udmhash64_t)k[18]<<16)+((udmhash64_t)k[19]<<24)
     +((udmhash64_t)k[20]<<32)+((udmhash64_t)k[21]<<40)+((udmhash64_t)k[22]<<48)+((udmhash64_t)k[23]<<56));
    mix64(a,b,c);
    k += 24; len -= 24;
  }

  /*------------------------------------- handle the last 23 bytes */
  c += length;
  switch(len)              /* all the case statements fall through */
  {
  case 23: c+=((udmhash64_t)k[22]<<56);
  case 22: c+=((udmhash64_t)k[21]<<48);
  case 21: c+=((udmhash64_t)k[20]<<40);
  case 20: c+=((udmhash64_t)k[19]<<32);
  case 19: c+=((udmhash64_t)k[18]<<24);
  case 18: c+=((udmhash64_t)k[17]<<16);
  case 17: c+=((udmhash64_t)k[16]<<8);
    /* the first byte of c is reserved for the length */
  case 16: b+=((udmhash64_t)k[15]<<56);
  case 15: b+=((udmhash64_t)k[14]<<48);
  case 14: b+=((udmhash64_t)k[13]<<40);
  case 13: b+=((udmhash64_t)k[12]<<32);
  case 12: b+=((udmhash64_t)k[11]<<24);
  case 11: b+=((udmhash64_t)k[10]<<16);
  case 10: b+=((udmhash64_t)k[ 9]<<8);
  case  9: b+=((udmhash64_t)k[ 8]);
  case  8: a+=((udmhash64_t)k[ 7]<<56);
  case  7: a+=((udmhash64_t)k[ 6]<<48);
  case  6: a+=((udmhash64_t)k[ 5]<<40);
  case  5: a+=((udmhash64_t)k[ 4]<<32);
  case  4: a+=((udmhash64_t)k[ 3]<<24);
  case  3: a+=((udmhash64_t)k[ 2]<<16);
  case  2: a+=((udmhash64_t)k[ 1]<<8);
  case  1: a+=((udmhash64_t)k[ 0]);
    /* case 0: nothing left to add */
  }
  mix64(a,b,c);
  /*-------------------------------------------- report the result */
  return c;
}
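A minimal call site for this variant, assuming udmhash64_t is a 64-bit unsigned type (the typedef is not shown in the snippet, and hash_of is a hypothetical wrapper):

#include <stdint.h>
#include <string.h>

typedef uint64_t udmhash64_t; /* assumption: the real typedef lives elsewhere */

static udmhash64_t hash_of(const char *key)
{
  /* pass unsigned bytes so high-bit characters do not sign-extend */
  return hash64((const unsigned char *)key, strlen(key), 0);
}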