/*	$NetBSD: murmurhash.c,v 1.7 2019/08/20 12:33:26 riastradh Exp $	*/

/*
 * MurmurHash2 -- from the original code:
 *
 * "MurmurHash2 was written by Austin Appleby, and is placed in the public
 * domain. The author hereby disclaims copyright to this source code."
 *
 * References:
 *	http://code.google.com/p/smhasher/
 *	https://sites.google.com/site/murmurhash/
 */

#include <sys/cdefs.h>

#if defined(_KERNEL) || defined(_STANDALONE)
__KERNEL_RCSID(0, "$NetBSD: murmurhash.c,v 1.7 2019/08/20 12:33:26 riastradh Exp $");

#else

#if defined(LIBC_SCCS) && !defined(lint)
__RCSID("$NetBSD: murmurhash.c,v 1.7 2019/08/20 12:33:26 riastradh Exp $");
#endif /* LIBC_SCCS and not lint */

#include "namespace.h"
#endif

#include <sys/types.h>
#include <sys/param.h>
#include <sys/hash.h>

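/*
 * In the userland libc build, namespace.h renames murmurhash2 to the
 * reserved _murmurhash2; export the public name as a weak alias of it.
 */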
#if !defined(_KERNEL) && !defined(_STANDALONE)
#ifdef __weak_alias
__weak_alias(murmurhash2,_murmurhash2)
#endif
#endif

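/*
 * murmurhash2: hash the 'len' bytes at 'key' into a 32-bit value,
 * perturbed by 'seed'.  A typical use is hash table bucket selection
 * with a power-of-two table size, e.g. (names here are illustrative):
 *
 *	i = murmurhash2(name, strlen(name), seed) & (hashsize - 1);
 */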
uint32_t
murmurhash2(const void *key, size_t len, uint32_t seed)
{
	/*
	 * Note: 'm' and 'r' are mixing constants generated offline.
	 * They're not really 'magic', they just happen to work well.
	 * Initialize the hash to a 'random' value.
	 */
	const uint32_t m = 0x5bd1e995;
	const int r = 24;

	const uint8_t *data = key;
	uint32_t h = seed ^ (uint32_t)len;

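	/*
	 * Mix the body of the key one 32-bit word at a time: each word is
	 * scrambled with the constants 'm' and 'r' and folded into 'h'.
	 * When the key is 32-bit aligned, load words directly and convert
	 * from little endian; otherwise assemble each word byte by byte so
	 * that strict-alignment architectures do not fault.
	 */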
	if (__predict_true(ALIGNED_POINTER(key, uint32_t))) {
		while (len >= sizeof(uint32_t)) {
			uint32_t k;

			ALIGNED_POINTER_LOAD(&k, data, uint32_t);
			k = htole32(k);

			k *= m;
			k ^= k >> r;
			k *= m;

			h *= m;
			h ^= k;

			data += sizeof(uint32_t);
			len -= sizeof(uint32_t);
		}
	} else {
		while (len >= sizeof(uint32_t)) {
			uint32_t k;

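			/* Byte-wise little-endian load: data[0] is the LSB. */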
			k  = data[0];
			k |= data[1] << 8;
			k |= data[2] << 16;
			k |= data[3] << 24;

			k *= m;
			k ^= k >> r;
			k *= m;

			h *= m;
			h ^= k;

			data += sizeof(uint32_t);
			len -= sizeof(uint32_t);
		}
	}

	/* Handle the last few bytes of the input array. */
	switch (len) {
	case 3:
		h ^= data[2] << 16;
		/* FALLTHROUGH */
	case 2:
		h ^= data[1] << 8;
		/* FALLTHROUGH */
	case 1:
		h ^= data[0];
		h *= m;
	}

	/*
	 * Do a few final mixes of the hash to ensure the last few
	 * bytes are well-incorporated.
	 */
	h ^= h >> 13;
	h *= m;
	h ^= h >> 15;

	return h;
}