@@ -14,74 +14,53 @@ internal static class Marvin
         /// Convenience method to compute a Marvin hash and collapse it into a 32-bit hash.
         /// </summary>
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        public static int ComputeHash32(ref byte data, int count, ulong seed)
+        public static int ComputeHash32(ReadOnlySpan<byte> data, ulong seed)
         {
-            long hash64 = ComputeHash(ref data, count, seed);
+            long hash64 = ComputeHash(data, seed);
             return ((int)(hash64 >> 32)) ^ (int)hash64;
         }
 
         /// <summary>
         /// Computes a 64-bit hash using the Marvin algorithm.
         /// </summary>
-        public static long ComputeHash(ref byte data, int count, ulong seed)
+        public static long ComputeHash(ReadOnlySpan<byte> data, ulong seed)
         {
-            uint ucount = (uint)count;
             uint p0 = (uint)seed;
             uint p1 = (uint)(seed >> 32);
 
-            int byteOffset = 0; // declared as signed int so we don't have to cast everywhere (it's passed to Unsafe.Add() and used for nothing else.)
-
-            while (ucount >= 8)
+            if (data.Length >= sizeof(uint))
             {
-                p0 += Unsafe.As<byte, uint>(ref Unsafe.Add(ref data, byteOffset));
-                Block(ref p0, ref p1);
+                ReadOnlySpan<uint> uData = data.NonPortableCast<byte, uint>();
 
-                p0 += Unsafe.As<byte, uint>(ref Unsafe.Add(ref data, byteOffset + 4));
-                Block(ref p0, ref p1);
+                for (int i = 0; i < uData.Length; i++)
+                {
+                    p0 += uData[i];
+                    Block(ref p0, ref p1);
+                }
 
-                byteOffset += 8;
-                ucount -= 8;
+                // byteOffset = data.Length - (data.Length % 4) is equivalent to
+                // clearing the last two bits of the length. Computing it with the
+                // modulo expression costs at least 5% on short strings, so use the mask.
+                int byteOffset = data.Length & (~3);
+                data = data.Slice(byteOffset);
             }
 
-            switch (ucount)
+            switch (data.Length)
             {
-                case 4:
-                    p0 += Unsafe.As<byte, uint>(ref Unsafe.Add(ref data, byteOffset));
-                    Block(ref p0, ref p1);
-                    goto case 0;
-
                 case 0:
                     p0 += 0x80u;
                     break;
 
-                case 5:
-                    p0 += Unsafe.As<byte, uint>(ref Unsafe.Add(ref data, byteOffset));
-                    byteOffset += 4;
-                    Block(ref p0, ref p1);
-                    goto case 1;
-
                 case 1:
-                    p0 += 0x8000u | Unsafe.Add(ref data, byteOffset);
+                    p0 += 0x8000u | data[0];
                     break;
 
-                case 6:
-                    p0 += Unsafe.As<byte, uint>(ref Unsafe.Add(ref data, byteOffset));
-                    byteOffset += 4;
-                    Block(ref p0, ref p1);
-                    goto case 2;
-
                 case 2:
-                    p0 += 0x800000u | Unsafe.As<byte, ushort>(ref Unsafe.Add(ref data, byteOffset));
+                    p0 += 0x800000u | data.NonPortableCast<byte, ushort>()[0];
                     break;
 
-                case 7:
-                    p0 += Unsafe.As<byte, uint>(ref Unsafe.Add(ref data, byteOffset));
-                    byteOffset += 4;
-                    Block(ref p0, ref p1);
-                    goto case 3;
-
                 case 3:
-                    p0 += 0x80000000u | (((uint)(Unsafe.Add(ref data, byteOffset + 2))) << 16) | (uint)(Unsafe.As<byte, ushort>(ref Unsafe.Add(ref data, byteOffset)));
+                    p0 += 0x80000000u | (((uint)data[2]) << 16) | (uint)(data.NonPortableCast<byte, ushort>()[0]);
                     break;
 
                 default:
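A note on the tail handling above: after the word loop, `data.Length & ~3` drops the bytes already consumed as whole 4-byte words, so the switch only ever sees 0-3 leftover bytes, which it folds together with Marvin's 0x80 terminator. The sketch below is illustrative only and is not part of this change: the helper name `FoldTail` is invented here, and `BitConverter.ToUInt16` stands in for the span reinterpret cast (both are native-endian reads, so they agree on little-endian platforms).

```csharp
using System;

static class MarvinTailSketch
{
    // Illustrative sketch: fold the 0-3 leftover bytes into an accumulator
    // word the same way the new switch does, with Marvin's 0x80 terminator
    // placed in the byte just above the last input byte.
    static uint FoldTail(ReadOnlySpan<byte> tail)
    {
        switch (tail.Length)
        {
            case 0:
                return 0x80u;                                    // terminator only
            case 1:
                return 0x8000u | tail[0];                        // 1 byte + terminator
            case 2:
                return 0x800000u | BitConverter.ToUInt16(tail);  // 2 bytes + terminator
            case 3:
                return 0x80000000u
                     | ((uint)tail[2] << 16)
                     | BitConverter.ToUInt16(tail);              // 3 bytes + terminator
            default:
                throw new ArgumentOutOfRangeException(nameof(tail), "expected 0-3 bytes");
        }
    }

    static void Main()
    {
        // Example: a 7-byte input. 7 & ~3 == 4, so the word loop consumes one
        // 4-byte block and Slice(4) leaves these three bytes for the switch.
        ReadOnlySpan<byte> tail = new byte[] { 0x0A, 0x0B, 0x0C };
        Console.WriteLine(FoldTail(tail).ToString("x8"));        // prints 800c0b0a
    }
}
```

Because the 0x80 marker always lands one byte past the last input byte, inputs that differ only by trailing zero bytes still produce different final words before the last `Block` rounds.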