[RFC,07/11] rust: sync: Implement dynamic lockdep class creation

Message ID 20230714-classless_lockdep-v1-7-229b9671ce31@asahilina.net (mailing list archive)
State New, archived
Series rust: Implicit lock class creation & Arc Lockdep integration

Commit Message

Asahi Lina July 14, 2023, 9:13 a.m. UTC
Using macros to create lock classes all over the place is unergonomic,
and makes it impossible to add new features that require lock classes to
code such as Arc<> without changing all callers.

Rust has the ability to track the caller's identity by file/line/column
number, and we can use that to dynamically generate lock classes
instead.

Signed-off-by: Asahi Lina <lina@asahilina.net>
---
 rust/kernel/sync/lockdep.rs    | 147 ++++++++++++++++++++++++++++++++++++++++-
 rust/kernel/sync/no_lockdep.rs |   8 +++
 2 files changed, 154 insertions(+), 1 deletion(-)
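
A minimal, standalone illustration of the mechanism the commit message relies on (plain std Rust rather than kernel code, shown here only for context): `#[track_caller]` lets a function observe its caller's file/line/column, which is what this patch hashes into a per-callsite lock class.

// Standalone sketch, not kernel code: #[track_caller] makes
// core::panic::Location::caller() report the location of the *call site*,
// giving each callsite a stable identity that can key a lock class.
#[track_caller]
fn callsite() -> &'static core::panic::Location<'static> {
    core::panic::Location::caller()
}

fn main() {
    let loc = callsite();
    // Prints something like "src/main.rs:11:15".
    println!("{}:{}:{}", loc.file(), loc.line(), loc.column());
}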

Comments

Martin Rodriguez Reboredo July 14, 2023, 7:56 p.m. UTC | #1
On 7/14/23 06:13, Asahi Lina wrote:
> Using macros to create lock classes all over the place is unergonomic,
> and makes it impossible to add new features that require lock classes to
> code such as Arc<> without changing all callers.
> 
> Rust has the ability to track the caller's identity by file/line/column
> number, and we can use that to dynamically generate lock classes
> instead.
> 
> Signed-off-by: Asahi Lina <lina@asahilina.net>
> ---
> [...]
> +
> +const LOCK_CLASS_BUCKETS: usize = 1024;
> +
> +#[track_caller]
> +fn caller_lock_class_inner() -> Result<&'static DynLockClassKey> {
> +    // This is just a hack to make the below static array initialization work.
> +    #[allow(clippy::declare_interior_mutable_const)]
> +    const ATOMIC_PTR: AtomicPtr<Mutex<Vec<&'static DynLockClassKey>>> =
> +        AtomicPtr::new(core::ptr::null_mut());
> +
> +    #[allow(clippy::complexity)]
> +    static LOCK_CLASSES: [AtomicPtr<Mutex<Vec<&'static DynLockClassKey>>>; LOCK_CLASS_BUCKETS] =
> +        [ATOMIC_PTR; LOCK_CLASS_BUCKETS];
> +
> +    let loc = core::panic::Location::caller();
> +    let loc_key = LocationKey::new(loc);
> +
> +    let index = (loc_key.hash % (LOCK_CLASS_BUCKETS as u64)) as usize;
> +    let slot = &LOCK_CLASSES[index];
> +
> +    let mut ptr = slot.load(Ordering::Relaxed);
> +    if ptr.is_null() {
> +        let new_element = Box::pin_init(new_mutex!(Vec::new()))?;
> +
> +        if let Err(e) = slot.compare_exchange(
> +            core::ptr::null_mut(),
> +            // SAFETY: We never move out of this Box
> +            Box::into_raw(unsafe { Pin::into_inner_unchecked(new_element) }),
> +            Ordering::Relaxed,
> +            Ordering::Relaxed,
> +        ) {
> +            // SAFETY: We just got this pointer from `into_raw()`
> +            unsafe { Box::from_raw(e) };
> +        }
> +
> +        ptr = slot.load(Ordering::Relaxed);
> +        assert!(!ptr.is_null());
> +    }
> +
> +    // SAFETY: This mutex was either just created above or previously allocated,
> +    // and we never free these objects so the pointer is guaranteed to be valid.
> +    let mut guard = unsafe { (*ptr).lock() };
> +
> +    for i in guard.iter() {
> +        if i.loc == loc_key {
> +            return Ok(i);
> +        }
> +    }
> +
> +    // We immediately leak the class, so it becomes 'static
> +    let new_class = Box::leak(Box::try_new(DynLockClassKey {
> +        key: Opaque::zeroed(),
> +        loc: loc_key,
> +        name: CString::try_from_fmt(fmt!("{}:{}:{}", loc.file(), loc.line(), loc.column()))?,
> +    })?);
> +
> +    // SAFETY: This is safe to call with a pointer to a dynamically allocated lockdep key,
> +    // and we never free the objects so it is safe to never unregister the key.
> +    unsafe { bindings::lockdep_register_key(new_class.key.get()) };
> +
> +    guard.try_push(new_class)?;
> +
> +    Ok(new_class)
> +}
> +
> [...]

Is it a problem if many `DynLockClassKey`s end up leaked, or is that fine?
Gary Guo July 15, 2023, 3:47 p.m. UTC | #2
On Fri, 14 Jul 2023 18:13:59 +0900
Asahi Lina <lina@asahilina.net> wrote:

> Using macros to create lock classes all over the place is unergonomic,
> and makes it impossible to add new features that require lock classes to
> code such as Arc<> without changing all callers.
> 
> Rust has the ability to track the caller's identity by file/line/column
> number, and we can use that to dynamically generate lock classes
> instead.
> 
> Signed-off-by: Asahi Lina <lina@asahilina.net>
> ---
>  rust/kernel/sync/lockdep.rs    | 147 ++++++++++++++++++++++++++++++++++++++++-
>  rust/kernel/sync/no_lockdep.rs |   8 +++
>  2 files changed, 154 insertions(+), 1 deletion(-)
> 
> diff --git a/rust/kernel/sync/lockdep.rs b/rust/kernel/sync/lockdep.rs
> index d8328f4275fb..fbf9f6ed403d 100644
> --- a/rust/kernel/sync/lockdep.rs
> +++ b/rust/kernel/sync/lockdep.rs
> @@ -5,7 +5,19 @@
>  //! This module abstracts the parts of the kernel lockdep API relevant to Rust
>  //! modules, including lock classes.
>  
> -use crate::types::Opaque;
> +use crate::{
> +    c_str, fmt,
> +    init::InPlaceInit,
> +    new_mutex,
> +    prelude::{Box, Result, Vec},
> +    str::{CStr, CString},
> +    sync::Mutex,
> +    types::Opaque,
> +};
> +
> +use core::hash::{Hash, Hasher};
> +use core::pin::Pin;
> +use core::sync::atomic::{AtomicPtr, Ordering};
>  
>  /// Represents a lockdep class. It's a wrapper around C's `lock_class_key`.
>  #[repr(transparent)]
> @@ -42,3 +54,136 @@ pub(crate) fn as_ptr(&self) -> *mut bindings::lock_class_key {
>  // actually dereferenced.
>  unsafe impl Send for LockClassKey {}
>  unsafe impl Sync for LockClassKey {}
> +
> +// Location is 'static but not really, since module unloads will
> +// invalidate existing static Locations within that module.
> +// To avoid breakage, we maintain our own location struct which is
> +// dynamically allocated on first reference. We store a hash of the
> +// whole location (including the filename string), as well as the
> +// line and column separately. The assumption is that this whole
> +// struct is highly unlikely to ever collide with a reasonable
> +// hash (this saves us from having to check the filename string
> +// itself).
> +#[derive(PartialEq, Debug)]
> +struct LocationKey {
> +    hash: u64,
> +    line: u32,
> +    column: u32,
> +}
> +
> +struct DynLockClassKey {
> +    key: Opaque<bindings::lock_class_key>,
> +    loc: LocationKey,
> +    name: CString,
> +}
> +
> +impl LocationKey {
> +    fn new(loc: &'static core::panic::Location<'static>) -> Self {
> +        let mut hasher = crate::siphash::SipHasher::new();
> +        loc.hash(&mut hasher);
> +
> +        LocationKey {
> +            hash: hasher.finish(),
> +            line: loc.line(),
> +            column: loc.column(),
> +        }
> +    }
> +}
> +
> +impl DynLockClassKey {
> +    fn key(&'static self) -> LockClassKey {
> +        LockClassKey(self.key.get())
> +    }

I don't understand why PATCH 06 is needed. If we keep the current
`LockClassKey` definition, this could just return `&'static
LockClassKey`, which would be a simple `&self.key`.
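
To make this concrete, here is a rough sketch of the alternative being described, assuming the pre-series `LockClassKey` definition (a `#[repr(transparent)]` wrapper around `Opaque<bindings::lock_class_key>`); since `DynLockClassKey` is leaked and never freed, handing out a `'static` borrow of the embedded key is enough:

struct DynLockClassKey {
    key: LockClassKey,
    loc: LocationKey,
    name: CString,
}

impl DynLockClassKey {
    // The receiver is 'static because the containing allocation is leaked,
    // so the returned borrow is 'static as well - no raw pointer needed.
    fn key(&'static self) -> &'static LockClassKey {
        &self.key
    }
}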

> +
> +    fn name(&'static self) -> &CStr {
> +        &self.name
> +    }
> +}
> +
> +const LOCK_CLASS_BUCKETS: usize = 1024;
> +
> +#[track_caller]
> +fn caller_lock_class_inner() -> Result<&'static DynLockClassKey> {
> +    // This is just a hack to make the below static array initialization work.
> +    #[allow(clippy::declare_interior_mutable_const)]
> +    const ATOMIC_PTR: AtomicPtr<Mutex<Vec<&'static DynLockClassKey>>> =
> +        AtomicPtr::new(core::ptr::null_mut());
> +
> +    #[allow(clippy::complexity)]
> +    static LOCK_CLASSES: [AtomicPtr<Mutex<Vec<&'static DynLockClassKey>>>; LOCK_CLASS_BUCKETS] =
> +        [ATOMIC_PTR; LOCK_CLASS_BUCKETS];
> +
> +    let loc = core::panic::Location::caller();
> +    let loc_key = LocationKey::new(loc);
> +
> +    let index = (loc_key.hash % (LOCK_CLASS_BUCKETS as u64)) as usize;
> +    let slot = &LOCK_CLASSES[index];
> +
> +    let mut ptr = slot.load(Ordering::Relaxed);
> +    if ptr.is_null() {
> +        let new_element = Box::pin_init(new_mutex!(Vec::new()))?;
> +
> +        if let Err(e) = slot.compare_exchange(
> +            core::ptr::null_mut(),
> +            // SAFETY: We never move out of this Box
> +            Box::into_raw(unsafe { Pin::into_inner_unchecked(new_element) }),
> +            Ordering::Relaxed,
> +            Ordering::Relaxed,
> +        ) {
> +            // SAFETY: We just got this pointer from `into_raw()`
> +            unsafe { Box::from_raw(e) };
> +        }
> +
> +        ptr = slot.load(Ordering::Relaxed);
> +        assert!(!ptr.is_null());
> +    }
> +
> +    // SAFETY: This mutex was either just created above or previously allocated,
> +    // and we never free these objects so the pointer is guaranteed to be valid.
> +    let mut guard = unsafe { (*ptr).lock() };
> +
> +    for i in guard.iter() {
> +        if i.loc == loc_key {
> +            return Ok(i);
> +        }
> +    }
> +
> +    // We immediately leak the class, so it becomes 'static
> +    let new_class = Box::leak(Box::try_new(DynLockClassKey {
> +        key: Opaque::zeroed(),
> +        loc: loc_key,
> +        name: CString::try_from_fmt(fmt!("{}:{}:{}", loc.file(), loc.line(), loc.column()))?,
> +    })?);
> +
> +    // SAFETY: This is safe to call with a pointer to a dynamically allocated lockdep key,
> +    // and we never free the objects so it is safe to never unregister the key.
> +    unsafe { bindings::lockdep_register_key(new_class.key.get()) };
> +
> +    guard.try_push(new_class)?;
> +
> +    Ok(new_class)
> +}
> +
> +#[track_caller]
> +pub(crate) fn caller_lock_class() -> (LockClassKey, &'static CStr) {
> +    match caller_lock_class_inner() {
> +        Ok(a) => (a.key(), a.name()),
> +        Err(_) => {
> +            crate::pr_err!(
> +                "Failed to dynamically allocate lock class, lockdep may be unreliable.\n"
> +            );
> +
> +            let loc = core::panic::Location::caller();
> +            // SAFETY: LockClassKey is opaque and the lockdep implementation only needs
> +            // unique addresses for statically allocated keys, so it is safe to just cast
> +            // the Location reference directly into a LockClassKey. However, this will
> +            // result in multiple keys for the same callsite due to monomorphization,
> +            // as well as spuriously destroyed keys when the static key is allocated in
> +            // the wrong module, which is what makes this unreliable.

If the only purpose of introducing `StaticLockClassKey` and changing
`LockClassKey` is to make this fallback path work, then I don't think
that change is worth it. I don't really see an issue with forging a
`&'static LockClassKey` from a `&'static Location`, especially since you
can't really do any memory access with `LockClassKey`.
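
To make the suggestion concrete, a rough sketch of the fallback being described (the function name is a hypothetical stand-in), assuming `LockClassKey` keeps its pre-series definition and that lockdep treats statically allocated keys purely as unique addresses, never dereferencing them from Rust:

#[track_caller]
fn forged_fallback_class() -> (&'static LockClassKey, &'static CStr) {
    let loc = core::panic::Location::caller();
    // Reuse the 'static Location's address as the key address. As the patch
    // notes, monomorphization can still produce several keys per callsite,
    // so this stays a best-effort fallback either way.
    let key = unsafe { &*(loc as *const core::panic::Location<'_> as *const LockClassKey) };
    (key, c_str!("fallback_lock_class"))
}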

> +            (
> +                LockClassKey(loc as *const _ as *mut _),
> +                c_str!("fallback_lock_class"),
> +            )
> +        }
> +    }
> +}
> diff --git a/rust/kernel/sync/no_lockdep.rs b/rust/kernel/sync/no_lockdep.rs
> index 518ec0bf9a7d..de53c4de7fbe 100644
> --- a/rust/kernel/sync/no_lockdep.rs
> +++ b/rust/kernel/sync/no_lockdep.rs
> @@ -4,6 +4,8 @@
>  //!
>  //! Takes the place of the `lockdep` module when lockdep is disabled.
>  
> +use crate::{c_str, str::CStr};
> +
>  /// A dummy, zero-sized lock class.
>  pub struct StaticLockClassKey();
>  
> @@ -28,3 +30,9 @@ pub(crate) fn as_ptr(&self) -> *mut bindings::lock_class_key {
>          core::ptr::null_mut()
>      }
>  }
> +
> +pub(crate) fn caller_lock_class() -> (LockClassKey, &'static CStr) {
> +    static DUMMY_LOCK_CLASS: StaticLockClassKey = StaticLockClassKey::new();
> +
> +    (DUMMY_LOCK_CLASS.key(), c_str!("dummy"))
> +}
>

Patch

diff --git a/rust/kernel/sync/lockdep.rs b/rust/kernel/sync/lockdep.rs
index d8328f4275fb..fbf9f6ed403d 100644
--- a/rust/kernel/sync/lockdep.rs
+++ b/rust/kernel/sync/lockdep.rs
@@ -5,7 +5,19 @@ 
 //! This module abstracts the parts of the kernel lockdep API relevant to Rust
 //! modules, including lock classes.
 
-use crate::types::Opaque;
+use crate::{
+    c_str, fmt,
+    init::InPlaceInit,
+    new_mutex,
+    prelude::{Box, Result, Vec},
+    str::{CStr, CString},
+    sync::Mutex,
+    types::Opaque,
+};
+
+use core::hash::{Hash, Hasher};
+use core::pin::Pin;
+use core::sync::atomic::{AtomicPtr, Ordering};
 
 /// Represents a lockdep class. It's a wrapper around C's `lock_class_key`.
 #[repr(transparent)]
@@ -42,3 +54,136 @@ pub(crate) fn as_ptr(&self) -> *mut bindings::lock_class_key {
 // actually dereferenced.
 unsafe impl Send for LockClassKey {}
 unsafe impl Sync for LockClassKey {}
+
+// Location is 'static but not really, since module unloads will
+// invalidate existing static Locations within that module.
+// To avoid breakage, we maintain our own location struct which is
+// dynamically allocated on first reference. We store a hash of the
+// whole location (including the filename string), as well as the
+// line and column separately. The assumption is that this whole
+// struct is highly unlikely to ever collide with a reasonable
+// hash (this saves us from having to check the filename string
+// itself).
+#[derive(PartialEq, Debug)]
+struct LocationKey {
+    hash: u64,
+    line: u32,
+    column: u32,
+}
+
+struct DynLockClassKey {
+    key: Opaque<bindings::lock_class_key>,
+    loc: LocationKey,
+    name: CString,
+}
+
+impl LocationKey {
+    fn new(loc: &'static core::panic::Location<'static>) -> Self {
+        let mut hasher = crate::siphash::SipHasher::new();
+        loc.hash(&mut hasher);
+
+        LocationKey {
+            hash: hasher.finish(),
+            line: loc.line(),
+            column: loc.column(),
+        }
+    }
+}
+
+impl DynLockClassKey {
+    fn key(&'static self) -> LockClassKey {
+        LockClassKey(self.key.get())
+    }
+
+    fn name(&'static self) -> &CStr {
+        &self.name
+    }
+}
+
+const LOCK_CLASS_BUCKETS: usize = 1024;
+
+#[track_caller]
+fn caller_lock_class_inner() -> Result<&'static DynLockClassKey> {
+    // This is just a hack to make the below static array initialization work.
+    #[allow(clippy::declare_interior_mutable_const)]
+    const ATOMIC_PTR: AtomicPtr<Mutex<Vec<&'static DynLockClassKey>>> =
+        AtomicPtr::new(core::ptr::null_mut());
+
+    #[allow(clippy::complexity)]
+    static LOCK_CLASSES: [AtomicPtr<Mutex<Vec<&'static DynLockClassKey>>>; LOCK_CLASS_BUCKETS] =
+        [ATOMIC_PTR; LOCK_CLASS_BUCKETS];
+
+    let loc = core::panic::Location::caller();
+    let loc_key = LocationKey::new(loc);
+
+    let index = (loc_key.hash % (LOCK_CLASS_BUCKETS as u64)) as usize;
+    let slot = &LOCK_CLASSES[index];
+
+    let mut ptr = slot.load(Ordering::Relaxed);
+    if ptr.is_null() {
+        let new_element = Box::pin_init(new_mutex!(Vec::new()))?;
+
+        if let Err(e) = slot.compare_exchange(
+            core::ptr::null_mut(),
+            // SAFETY: We never move out of this Box
+            Box::into_raw(unsafe { Pin::into_inner_unchecked(new_element) }),
+            Ordering::Relaxed,
+            Ordering::Relaxed,
+        ) {
+            // SAFETY: We just got this pointer from `into_raw()`
+            unsafe { Box::from_raw(e) };
+        }
+
+        ptr = slot.load(Ordering::Relaxed);
+        assert!(!ptr.is_null());
+    }
+
+    // SAFETY: This mutex was either just created above or previously allocated,
+    // and we never free these objects so the pointer is guaranteed to be valid.
+    let mut guard = unsafe { (*ptr).lock() };
+
+    for i in guard.iter() {
+        if i.loc == loc_key {
+            return Ok(i);
+        }
+    }
+
+    // We immediately leak the class, so it becomes 'static
+    let new_class = Box::leak(Box::try_new(DynLockClassKey {
+        key: Opaque::zeroed(),
+        loc: loc_key,
+        name: CString::try_from_fmt(fmt!("{}:{}:{}", loc.file(), loc.line(), loc.column()))?,
+    })?);
+
+    // SAFETY: This is safe to call with a pointer to a dynamically allocated lockdep key,
+    // and we never free the objects so it is safe to never unregister the key.
+    unsafe { bindings::lockdep_register_key(new_class.key.get()) };
+
+    guard.try_push(new_class)?;
+
+    Ok(new_class)
+}
+
+#[track_caller]
+pub(crate) fn caller_lock_class() -> (LockClassKey, &'static CStr) {
+    match caller_lock_class_inner() {
+        Ok(a) => (a.key(), a.name()),
+        Err(_) => {
+            crate::pr_err!(
+                "Failed to dynamically allocate lock class, lockdep may be unreliable.\n"
+            );
+
+            let loc = core::panic::Location::caller();
+            // SAFETY: LockClassKey is opaque and the lockdep implementation only needs
+            // unique addresses for statically allocated keys, so it is safe to just cast
+            // the Location reference directly into a LockClassKey. However, this will
+            // result in multiple keys for the same callsite due to monomorphization,
+            // as well as spuriously destroyed keys when the static key is allocated in
+            // the wrong module, which is what makes this unreliable.
+            (
+                LockClassKey(loc as *const _ as *mut _),
+                c_str!("fallback_lock_class"),
+            )
+        }
+    }
+}
diff --git a/rust/kernel/sync/no_lockdep.rs b/rust/kernel/sync/no_lockdep.rs
index 518ec0bf9a7d..de53c4de7fbe 100644
--- a/rust/kernel/sync/no_lockdep.rs
+++ b/rust/kernel/sync/no_lockdep.rs
@@ -4,6 +4,8 @@ 
 //!
 //! Takes the place of the `lockdep` module when lockdep is disabled.
 
+use crate::{c_str, str::CStr};
+
 /// A dummy, zero-sized lock class.
 pub struct StaticLockClassKey();
 
@@ -28,3 +30,9 @@ pub(crate) fn as_ptr(&self) -> *mut bindings::lock_class_key {
         core::ptr::null_mut()
     }
 }
+
+pub(crate) fn caller_lock_class() -> (LockClassKey, &'static CStr) {
+    static DUMMY_LOCK_CLASS: StaticLockClassKey = StaticLockClassKey::new();
+
+    (DUMMY_LOCK_CLASS.key(), c_str!("dummy"))
+}
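
For context, a minimal sketch of how a constructor inside the kernel crate might consume caller_lock_class(); `new_auto_lock` and `MyLock::new_with_class` are hypothetical stand-ins for whatever constructors later patches in the series wire this into, not APIs added by this patch. `caller_lock_class()` is assumed to be in scope from the sync::lockdep (or sync::no_lockdep) module.

// Because the constructor is itself #[track_caller], Location::caller()
// inside caller_lock_class() resolves to the callsite of new_auto_lock(),
// so each distinct callsite gets (and then reuses) its own dynamically
// registered lock class without any macro at the call site.
#[track_caller]
pub(crate) fn new_auto_lock<T>(data: T) -> Result<MyLock<T>> {
    let (key, name) = caller_lock_class();
    // Stand-in for the real lock constructor that would receive the key/name.
    MyLock::new_with_class(data, name, key)
}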