Auto merge of #155550 - zetanumbers:cache_insert_unique, r=oli-obk

Replace `ShardedHashMap` method `insert` with debug-checked `insert_unique`

Currently every use of `ShardedHashMap::insert` checks that it does not evict an old value for a key that is already present. I haven't found any issue caused by a violation of that condition, so I propose replacing it with `ShardedHashMap::insert_unique`, which only checks the condition when `debug_assertions` are enabled. This might improve performance.

r? @petrochenkov
This commit is contained in:
bors
2026-04-22 22:28:22 +00:00
3 changed files with 25 additions and 24 deletions
+15 -7
View File
@@ -1,6 +1,6 @@
use std::borrow::Borrow;
use std::hash::{Hash, Hasher};
use std::{iter, mem};
use std::iter;
use either::Either;
use hashbrown::hash_table::{self, Entry, HashTable};
@@ -183,19 +183,27 @@ pub fn get_or_insert_with(&self, key: K, default: impl FnOnce() -> V) -> V
}
}
/// Insert value into the [`ShardedHashMap`] with unique key.
///
/// This function panics if debug_assertions are enabled and uniqueness is violated.
/// If uniqueness is violated but debug_assertions are disabled then lookups will arbitrarily
/// return one of the inserted elements.
#[inline]
pub fn insert(&self, key: K, value: V) -> Option<V> {
pub fn insert_unique(&self, key: K, value: V) {
let hash = make_hash(&key);
let mut shard = self.lock_shard_by_hash(hash);
match table_entry(&mut shard, hash, &key) {
Entry::Occupied(e) => {
let previous = mem::replace(&mut e.into_mut().1, value);
Some(previous)
cfg_select! {
debug_assertions => match table_entry(&mut shard, hash, &key) {
Entry::Occupied(_) => {
panic!("tried to insert key that's already present");
}
Entry::Vacant(e) => {
e.insert((key, value));
None
}
}
_ => {
shard.insert_unique(hash, (key, value), |(k, _)| make_hash(k));
}
}
}
+7 -12
View File
@@ -473,9 +473,8 @@ fn reserve_and_set_dedup(self, alloc: GlobalAlloc<'tcx>, salt: usize) -> AllocId
}
let id = self.alloc_map.reserve();
debug!("creating alloc {:?} with id {id:?}", alloc_salt.0);
let had_previous = self.alloc_map.to_alloc.insert(id, alloc_salt.0.clone()).is_some();
// We just reserved, so should always be unique.
assert!(!had_previous);
self.alloc_map.to_alloc.insert_unique(id, alloc_salt.0.clone());
dedup.insert(alloc_salt, id);
id
}
@@ -548,21 +547,17 @@ pub fn global_alloc(self, id: AllocId) -> GlobalAlloc<'tcx> {
}
/// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
/// call this function twice, even with the same `Allocation` will ICE the compiler.
/// call this function twice, even with the same `Allocation` will ICE the compiler if
/// debug_assertions are enabled.
pub fn set_alloc_id_memory(self, id: AllocId, mem: ConstAllocation<'tcx>) {
if let Some(old) = self.alloc_map.to_alloc.insert(id, GlobalAlloc::Memory(mem)) {
bug!("tried to set allocation ID {id:?}, but it was already existing as {old:#?}");
}
self.alloc_map.to_alloc.insert_unique(id, GlobalAlloc::Memory(mem))
}
/// Freezes an `AllocId` created with `reserve` by pointing it at a static item. Trying to
/// call this function twice, even with the same `DefId` will ICE the compiler.
/// call this function twice, even with the same `DefId` will ICE the compiler if
/// debug_assertions are enabled.
pub fn set_nested_alloc_id_static(self, id: AllocId, def_id: LocalDefId) {
if let Some(old) =
self.alloc_map.to_alloc.insert(id, GlobalAlloc::Static(def_id.to_def_id()))
{
bug!("tried to set allocation ID {id:?}, but it was already existing as {old:#?}");
}
self.alloc_map.to_alloc.insert_unique(id, GlobalAlloc::Static(def_id.to_def_id()))
}
}
+1 -3
View File
@@ -65,9 +65,7 @@ fn lookup(&self, key: &K) -> Option<(V, DepNodeIndex)> {
#[inline]
fn complete(&self, key: K, value: V, index: DepNodeIndex) {
// We may be overwriting another value. This is all right, since the dep-graph
// will check that the value fingerprint matches.
self.cache.insert(key, (value, index));
self.cache.insert_unique(key, (value, index));
}
fn for_each(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {