|
//! This is a copy of the `rustc_hash` crate, adapted to work as a module.
//!
//! If in the future it becomes more reasonable to add dependencies to
//! `proc_macro`, this module should be removed and replaced with a
//! dependency on the `rustc_hash` crate.
|
| 7 | +use std::collections::HashMap; |
| 8 | +use std::convert::TryInto; |
| 9 | +use std::default::Default; |
| 10 | +use std::hash::BuildHasherDefault; |
| 11 | +use std::hash::Hasher; |
| 12 | +use std::mem::size_of; |
| 13 | +use std::ops::BitXor; |
| 14 | + |
/// Type alias for a hashmap using the `fx` hash algorithm.
///
/// Drop-in replacement for `std::collections::HashMap` that swaps the
/// default SipHash hasher for the faster, non-cryptographic [`FxHasher`]
/// (no hash-flooding/DoS resistance — see the `FxHasher` docs).
pub type FxHashMap<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher>>;
| 17 | + |
/// A speedy hash algorithm for use within rustc. The hashmap in liballoc
/// by default uses SipHash which isn't quite as speedy as we want. In the
/// compiler we're not really worried about DOS attempts, so we use a fast
/// non-cryptographic hash.
///
/// This is the same as the algorithm used by Firefox -- which is a homespun
/// one not based on any widely-known algorithm -- though modified to produce
/// 64-bit hash values instead of 32-bit hash values. It consistently
/// out-performs an FNV-based hash within rustc itself -- the collision rate is
/// similar or slightly worse than FNV, but the speed of the hash function
/// itself is much higher because it works on up to 8 bytes at a time.
pub struct FxHasher {
    // Running hash state. One machine word wide so the core `write` loop
    // can fold whole `usize`-sized chunks into it at a time.
    hash: usize,
}
| 32 | + |
// Multiplicative mixing constant, sized to the platform word. Large odd
// constants like these diffuse bits well under wrapping multiplication.
// NOTE(review): 0x9e3779b9 appears to be the classic 32-bit golden-ratio
// constant (2^32 / phi); confirm the 64-bit value's provenance against
// the upstream `rustc_hash` crate.
#[cfg(target_pointer_width = "32")]
const K: usize = 0x9e3779b9;
#[cfg(target_pointer_width = "64")]
const K: usize = 0x517cc1b727220a95;
| 37 | + |
| 38 | +impl Default for FxHasher { |
| 39 | + #[inline] |
| 40 | + fn default() -> FxHasher { |
| 41 | + FxHasher { hash: 0 } |
| 42 | + } |
| 43 | +} |
| 44 | + |
| 45 | +impl FxHasher { |
| 46 | + #[inline] |
| 47 | + fn add_to_hash(&mut self, i: usize) { |
| 48 | + self.hash = self.hash.rotate_left(5).bitxor(i).wrapping_mul(K); |
| 49 | + } |
| 50 | +} |
| 51 | + |
| 52 | +impl Hasher for FxHasher { |
| 53 | + #[inline] |
| 54 | + fn write(&mut self, mut bytes: &[u8]) { |
| 55 | + #[cfg(target_pointer_width = "32")] |
| 56 | + let read_usize = |bytes: &[u8]| u32::from_ne_bytes(bytes[..4].try_into().unwrap()); |
| 57 | + #[cfg(target_pointer_width = "64")] |
| 58 | + let read_usize = |bytes: &[u8]| u64::from_ne_bytes(bytes[..8].try_into().unwrap()); |
| 59 | + |
| 60 | + let mut hash = FxHasher { hash: self.hash }; |
| 61 | + assert!(size_of::<usize>() <= 8); |
| 62 | + while bytes.len() >= size_of::<usize>() { |
| 63 | + hash.add_to_hash(read_usize(bytes) as usize); |
| 64 | + bytes = &bytes[size_of::<usize>()..]; |
| 65 | + } |
| 66 | + if (size_of::<usize>() > 4) && (bytes.len() >= 4) { |
| 67 | + hash.add_to_hash(u32::from_ne_bytes(bytes[..4].try_into().unwrap()) as usize); |
| 68 | + bytes = &bytes[4..]; |
| 69 | + } |
| 70 | + if (size_of::<usize>() > 2) && bytes.len() >= 2 { |
| 71 | + hash.add_to_hash(u16::from_ne_bytes(bytes[..2].try_into().unwrap()) as usize); |
| 72 | + bytes = &bytes[2..]; |
| 73 | + } |
| 74 | + if (size_of::<usize>() > 1) && bytes.len() >= 1 { |
| 75 | + hash.add_to_hash(bytes[0] as usize); |
| 76 | + } |
| 77 | + self.hash = hash.hash; |
| 78 | + } |
| 79 | + |
| 80 | + #[inline] |
| 81 | + fn write_u8(&mut self, i: u8) { |
| 82 | + self.add_to_hash(i as usize); |
| 83 | + } |
| 84 | + |
| 85 | + #[inline] |
| 86 | + fn write_u16(&mut self, i: u16) { |
| 87 | + self.add_to_hash(i as usize); |
| 88 | + } |
| 89 | + |
| 90 | + #[inline] |
| 91 | + fn write_u32(&mut self, i: u32) { |
| 92 | + self.add_to_hash(i as usize); |
| 93 | + } |
| 94 | + |
| 95 | + #[cfg(target_pointer_width = "32")] |
| 96 | + #[inline] |
| 97 | + fn write_u64(&mut self, i: u64) { |
| 98 | + self.add_to_hash(i as usize); |
| 99 | + self.add_to_hash((i >> 32) as usize); |
| 100 | + } |
| 101 | + |
| 102 | + #[cfg(target_pointer_width = "64")] |
| 103 | + #[inline] |
| 104 | + fn write_u64(&mut self, i: u64) { |
| 105 | + self.add_to_hash(i as usize); |
| 106 | + } |
| 107 | + |
| 108 | + #[inline] |
| 109 | + fn write_usize(&mut self, i: usize) { |
| 110 | + self.add_to_hash(i); |
| 111 | + } |
| 112 | + |
| 113 | + #[inline] |
| 114 | + fn finish(&self) -> u64 { |
| 115 | + self.hash as u64 |
| 116 | + } |
| 117 | +} |
0 commit comments