|
5 | 5 | //! This module abstracts the parts of the kernel lockdep API relevant to Rust |
6 | 6 | //! modules, including lock classes. |
7 | 7 |
|
8 | | -use crate::types::Opaque; |
| 8 | +use crate::{ |
| 9 | + alloc::flags::*, |
| 10 | + c_str, fmt, |
| 11 | + init::InPlaceInit, |
| 12 | + new_mutex, |
| 13 | + prelude::{KBox, KVec, Result}, |
| 14 | + str::{CStr, CString}, |
| 15 | + sync::Mutex, |
| 16 | + types::Opaque, |
| 17 | +}; |
| 18 | + |
| 19 | +use core::hash::{Hash, Hasher}; |
| 20 | +use core::pin::Pin; |
| 21 | +use core::sync::atomic::{AtomicPtr, Ordering}; |
9 | 22 |
|
10 | 23 | /// Represents a lockdep class. It's a wrapper around C's `lock_class_key`. |
11 | 24 | #[repr(transparent)] |
@@ -42,3 +55,144 @@ impl LockClassKey { |
42 | 55 | // actually dereferenced. |
43 | 56 | unsafe impl Send for LockClassKey {} |
44 | 57 | unsafe impl Sync for LockClassKey {} |
| 58 | + |
// A `core::panic::Location` is `'static` in the type system but not in
// practice: unloading a module invalidates any `Location`s stored in that
// module's static data. To avoid breakage we maintain our own location
// struct, dynamically allocated on first reference. We store a hash of
// the whole location (including the filename string), plus the line and
// column separately. The assumption is that the full (hash, line, column)
// triple is highly unlikely to ever collide between two distinct call
// sites, which saves us from having to store and compare the filename
// string itself.
#[derive(PartialEq, Debug)]
struct LocationKey {
    // SipHash of the whole `Location` (filename, line and column).
    hash: u64,
    // Line number of the call site (also folded into `hash`; kept
    // separately to make collisions even less likely).
    line: u32,
    // Column number of the call site (same rationale as `line`).
    column: u32,
}
| 74 | + |
// A dynamically allocated lockdep class key, identified by the call site
// that created it. These objects are created on first use and are
// intentionally leaked, so `&'static` references to them are sound.
struct DynLockClassKey {
    // The lockdep key itself, registered with `lockdep_register_key()`.
    key: Opaque<bindings::lock_class_key>,
    // Identifies the call site this key was created for.
    loc: LocationKey,
    // Human-readable "file:line:column" class name handed to lockdep.
    name: CString,
}
| 80 | + |
| 81 | +impl LocationKey { |
| 82 | + fn new(loc: &'static core::panic::Location<'static>) -> Self { |
| 83 | + let mut hasher = crate::siphash::SipHasher::new(); |
| 84 | + loc.hash(&mut hasher); |
| 85 | + |
| 86 | + LocationKey { |
| 87 | + hash: hasher.finish(), |
| 88 | + line: loc.line(), |
| 89 | + column: loc.column(), |
| 90 | + } |
| 91 | + } |
| 92 | +} |
| 93 | + |
impl DynLockClassKey {
    // Returns the `LockClassKey` wrapper around our registered key.
    // `&'static self` is required because the returned value holds a raw
    // pointer into `self.key` that must remain valid indefinitely.
    fn key(&'static self) -> LockClassKey {
        LockClassKey(self.key.get())
    }

    // Returns the generated class name ("file:line:column", see
    // `caller_lock_class_inner()` where it is formatted).
    fn name(&'static self) -> &CStr {
        &self.name
    }
}
| 103 | + |
// Number of hash buckets in the dynamic lock class table below.
const LOCK_CLASS_BUCKETS: usize = 1024;
| 105 | + |
| 106 | +#[track_caller] |
| 107 | +fn caller_lock_class_inner() -> Result<&'static DynLockClassKey> { |
| 108 | + // This is just a hack to make the below static array initialization work. |
| 109 | + #[allow(clippy::declare_interior_mutable_const)] |
| 110 | + const ATOMIC_PTR: AtomicPtr<Mutex<KVec<&'static DynLockClassKey>>> = |
| 111 | + AtomicPtr::new(core::ptr::null_mut()); |
| 112 | + |
| 113 | + #[allow(clippy::complexity)] |
| 114 | + static LOCK_CLASSES: [AtomicPtr<Mutex<KVec<&'static DynLockClassKey>>>; LOCK_CLASS_BUCKETS] = |
| 115 | + [ATOMIC_PTR; LOCK_CLASS_BUCKETS]; |
| 116 | + |
| 117 | + let loc = core::panic::Location::caller(); |
| 118 | + let loc_key = LocationKey::new(loc); |
| 119 | + |
| 120 | + let index = (loc_key.hash % (LOCK_CLASS_BUCKETS as u64)) as usize; |
| 121 | + let slot = &LOCK_CLASSES[index]; |
| 122 | + |
| 123 | + let mut ptr = slot.load(Ordering::Relaxed); |
| 124 | + if ptr.is_null() { |
| 125 | + let new_element = KBox::pin_init(new_mutex!(KVec::new()), GFP_KERNEL)?; |
| 126 | + |
| 127 | + // SAFETY: We never move out of this Box |
| 128 | + let raw = KBox::into_raw(unsafe { Pin::into_inner_unchecked(new_element) }); |
| 129 | + |
| 130 | + if slot |
| 131 | + .compare_exchange( |
| 132 | + core::ptr::null_mut(), |
| 133 | + raw, |
| 134 | + Ordering::Relaxed, |
| 135 | + Ordering::Relaxed, |
| 136 | + ) |
| 137 | + .is_err() |
| 138 | + { |
| 139 | + // SAFETY: We just got this pointer from `into_raw()` |
| 140 | + unsafe { drop(KBox::from_raw(raw)) }; |
| 141 | + } |
| 142 | + |
| 143 | + ptr = slot.load(Ordering::Relaxed); |
| 144 | + assert!(!ptr.is_null()); |
| 145 | + } |
| 146 | + |
| 147 | + // SAFETY: This mutex was either just created above or previously allocated, |
| 148 | + // and we never free these objects so the pointer is guaranteed to be valid. |
| 149 | + let mut guard = unsafe { (*ptr).lock() }; |
| 150 | + |
| 151 | + for i in guard.iter() { |
| 152 | + if i.loc == loc_key { |
| 153 | + return Ok(i); |
| 154 | + } |
| 155 | + } |
| 156 | + |
| 157 | + // We immediately leak the class, so it becomes 'static |
| 158 | + let new_class = KBox::leak(KBox::new( |
| 159 | + DynLockClassKey { |
| 160 | + key: Opaque::zeroed(), |
| 161 | + loc: loc_key, |
| 162 | + name: CString::try_from_fmt(fmt!("{}:{}:{}", loc.file(), loc.line(), loc.column()))?, |
| 163 | + }, |
| 164 | + GFP_KERNEL, |
| 165 | + )?); |
| 166 | + |
| 167 | + // SAFETY: This is safe to call with a pointer to a dynamically allocated lockdep key, |
| 168 | + // and we never free the objects so it is safe to never unregister the key. |
| 169 | + unsafe { bindings::lockdep_register_key(new_class.key.get()) }; |
| 170 | + |
| 171 | + guard.push(new_class, GFP_KERNEL)?; |
| 172 | + |
| 173 | + Ok(new_class) |
| 174 | +} |
| 175 | + |
| 176 | +#[track_caller] |
| 177 | +pub(crate) fn caller_lock_class() -> (LockClassKey, &'static CStr) { |
| 178 | + match caller_lock_class_inner() { |
| 179 | + Ok(a) => (a.key(), a.name()), |
| 180 | + Err(_) => { |
| 181 | + crate::pr_err!( |
| 182 | + "Failed to dynamically allocate lock class, lockdep may be unreliable.\n" |
| 183 | + ); |
| 184 | + |
| 185 | + let loc = core::panic::Location::caller(); |
| 186 | + // SAFETY: LockClassKey is opaque and the lockdep implementation only needs |
| 187 | + // unique addresses for statically allocated keys, so it is safe to just cast |
| 188 | + // the Location reference directly into a LockClassKey. However, this will |
| 189 | + // result in multiple keys for the same callsite due to monomorphization, |
| 190 | + // as well as spuriously destroyed keys when the static key is allocated in |
| 191 | + // the wrong module, which is what makes this unreliable. |
| 192 | + ( |
| 193 | + LockClassKey(loc as *const _ as *mut _), |
| 194 | + c_str!("fallback_lock_class"), |
| 195 | + ) |
| 196 | + } |
| 197 | + } |
| 198 | +} |
0 commit comments