--- a/rust/helpers/mm.c
+++ b/rust/helpers/mm.c
@@ -43,3 +43,8 @@ struct vm_area_struct *rust_helper_vma_lookup(struct mm_struct *mm,
 {
 	return vma_lookup(mm, addr);
 }
+
+void rust_helper_vma_end_read(struct vm_area_struct *vma)
+{
+	vma_end_read(vma);
+}
--- a/rust/kernel/mm.rs
+++ b/rust/kernel/mm.rs
@@ -13,6 +13,7 @@
 use core::{ops::Deref, ptr::NonNull};
 
 pub mod virt;
+use virt::VmAreaRef;
 
 /// A wrapper for the kernel's `struct mm_struct`.
 ///
@@ -170,6 +171,32 @@ pub unsafe fn from_raw<'a>(ptr: *const bindings::mm_struct) -> &'a MmWithUser {
         unsafe { &*ptr.cast() }
     }
 
+    /// Try to take the vma read lock under RCU.
+    ///
+    /// If this operation fails, the vma may still exist. In that case, you should take the mmap
+    /// read lock and try to use `vma_lookup` instead.
+    ///
+    /// When per-vma locks are disabled, this always returns `None`.
+    #[inline]
+    pub fn lock_vma_under_rcu(&self, vma_addr: usize) -> Option<VmaReadGuard<'_>> {
+        #[cfg(CONFIG_PER_VMA_LOCK)]
+        {
+            // SAFETY: Calling `bindings::lock_vma_under_rcu` is always okay given an mm where
+            // `mm_users` is non-zero.
+            let vma = unsafe { bindings::lock_vma_under_rcu(self.as_raw(), vma_addr as _) };
+            if !vma.is_null() {
+                return Some(VmaReadGuard {
+                    // SAFETY: If `lock_vma_under_rcu` returns a non-null ptr, then it points at a
+                    // valid vma. The vma is stable for as long as the vma read lock is held.
+                    vma: unsafe { VmAreaRef::from_raw(vma) },
+                    _nts: NotThreadSafe,
+                });
+            }
+        }
+
+        None
+    }
+
     /// Lock the mmap read lock.
     #[inline]
     pub fn mmap_read_lock(&self) -> MmapReadGuard<'_> {
@@ -238,3 +265,32 @@ fn drop(&mut self) {
         unsafe { bindings::mmap_read_unlock(self.mm.as_raw()) };
     }
 }
+
+/// A guard for the vma read lock.
+///
+/// # Invariants
+///
+/// This `VmaReadGuard` owns the vma read lock.
+pub struct VmaReadGuard<'a> {
+    vma: &'a VmAreaRef,
+    // `vma_end_read` must be called on the same thread where the lock was taken.
+    _nts: NotThreadSafe,
+}
+
+// Make all `VmAreaRef` methods available on `VmaReadGuard`.
+impl Deref for VmaReadGuard<'_> {
+    type Target = VmAreaRef;
+
+    #[inline]
+    fn deref(&self) -> &VmAreaRef {
+        self.vma
+    }
+}
+
+impl Drop for VmaReadGuard<'_> {
+    #[inline]
+    fn drop(&mut self) {
+        // SAFETY: We hold the read lock by the type invariants.
+        unsafe { bindings::vma_end_read(self.vma.as_ptr()) };
+    }
+}
All of Rust Binder's existing calls to `vm_insert_page` could be optimized to
first attempt to use `lock_vma_under_rcu`. This patch provides an abstraction
to enable that.

Signed-off-by: Alice Ryhl <aliceryhl@google.com>
---
 rust/helpers/mm.c |  5 +++++
 rust/kernel/mm.rs | 56 +++++++++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 61 insertions(+)
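
For illustration only (not part of this patch), here is a rough sketch of how a
caller such as Rust Binder's page-insertion path might combine the new
lock_vma_under_rcu() fast path with the existing mmap read lock fallback. The
`with_vma_at` helper and `use_vma` callback are hypothetical names, the
`kernel::mm` import paths are assumptions, and `MmapReadGuard::vma_lookup` is
assumed from elsewhere in this series; `use_vma` stands in for whatever work
the caller does with the vma (e.g. calling vm_insert_page).

use kernel::mm::{virt::VmAreaRef, MmWithUser};

/// Run `use_vma` on the vma containing `addr`, preferring the per-vma read
/// lock and falling back to the mmap read lock when the trylock fails.
fn with_vma_at(mm: &MmWithUser, addr: usize, use_vma: impl FnOnce(&VmAreaRef)) {
    // Fast path: optimistic per-vma read lock; avoids contending on mmap_lock.
    // The guard releases the vma read lock when it goes out of scope.
    if let Some(vma) = mm.lock_vma_under_rcu(addr) {
        use_vma(&vma);
        return;
    }

    // Slow path: the vma trylock failed (or CONFIG_PER_VMA_LOCK is off), so
    // take the mmap read lock and look the vma up under it (assumed
    // `MmapReadGuard::vma_lookup` from elsewhere in this series).
    let mmap = mm.mmap_read_lock();
    if let Some(vma) = mmap.vma_lookup(addr) {
        use_vma(vma);
    }
}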