// SPDX-License-Identifier: GPL-2.0

//! Kernel types.

use crate::init::{self, PinInit};
use alloc::boxed::Box;
use core::{
    cell::UnsafeCell,
    marker::{PhantomData, PhantomPinned},
    mem::{ManuallyDrop, MaybeUninit},
    ops::{Deref, DerefMut},
    pin::Pin,
    ptr::NonNull,
};

/// Used to transfer ownership to and from foreign (non-Rust) languages.
///
/// Ownership is transferred from Rust to a foreign language by calling [`Self::into_foreign`] and
/// later may be transferred back to Rust by calling [`Self::from_foreign`].
///
/// This trait is meant to be used in cases when Rust objects are stored in C objects and
/// eventually "freed" back to Rust.
pub trait ForeignOwnable: Sized {
    /// Type of values borrowed between calls to [`ForeignOwnable::into_foreign`] and
    /// [`ForeignOwnable::from_foreign`].
    type Borrowed<'a>;

    /// Converts a Rust-owned object to a foreign-owned one.
    ///
    /// The foreign representation is a pointer to void. There are no guarantees for this pointer.
    /// For example, it might be invalid, dangling or pointing to uninitialized memory. Using it in
    /// any way except for [`ForeignOwnable::from_foreign`], [`ForeignOwnable::borrow`] or
    /// [`ForeignOwnable::try_from_foreign`] can result in undefined behavior.
    fn into_foreign(self) -> *const core::ffi::c_void;

    /// Borrows a foreign-owned object.
    ///
    /// # Safety
    ///
    /// `ptr` must have been returned by a previous call to [`ForeignOwnable::into_foreign`] for
    /// which a previous matching [`ForeignOwnable::from_foreign`] hasn't been called yet.
    unsafe fn borrow<'a>(ptr: *const core::ffi::c_void) -> Self::Borrowed<'a>;

    /// Converts a foreign-owned object back to a Rust-owned one.
    ///
    /// # Safety
    ///
    /// `ptr` must have been returned by a previous call to [`ForeignOwnable::into_foreign`] for
    /// which a previous matching [`ForeignOwnable::from_foreign`] hasn't been called yet.
    /// Additionally, all instances (if any) of values returned by [`ForeignOwnable::borrow`] for
    /// this object must have been dropped.
    unsafe fn from_foreign(ptr: *const core::ffi::c_void) -> Self;

    /// Tries to convert a foreign-owned object back to a Rust-owned one.
    ///
    /// A convenience wrapper over [`ForeignOwnable::from_foreign`] that returns [`None`] if `ptr`
    /// is null.
    ///
    /// # Safety
    ///
    /// `ptr` must either be null or satisfy the safety requirements for
    /// [`ForeignOwnable::from_foreign`].
    unsafe fn try_from_foreign(ptr: *const core::ffi::c_void) -> Option<Self> {
        if ptr.is_null() {
            None
        } else {
            // SAFETY: Since `ptr` is not null here, then `ptr` satisfies the safety requirements
            // of `from_foreign` given the safety requirements of this function.
            unsafe { Some(Self::from_foreign(ptr)) }
        }
    }
}

impl<T: 'static> ForeignOwnable for Box<T> {
    type Borrowed<'a> = &'a T;

    fn into_foreign(self) -> *const core::ffi::c_void {
        Box::into_raw(self) as _
    }

    unsafe fn borrow<'a>(ptr: *const core::ffi::c_void) -> &'a T {
        // SAFETY: The safety requirements for this function ensure that the object is still alive,
        // so it is safe to dereference the raw pointer.
        // The safety requirements of `from_foreign` also ensure that the object remains alive for
        // the lifetime of the returned value.
        unsafe { &*ptr.cast() }
    }

    unsafe fn from_foreign(ptr: *const core::ffi::c_void) -> Self {
        // SAFETY: The safety requirements of this function ensure that `ptr` comes from a previous
        // call to `Self::into_foreign`.
        unsafe { Box::from_raw(ptr as _) }
    }
}
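
// Illustrative sketch (not part of the upstream file): a full `ForeignOwnable` round trip for
// `Box<i32>`, of the kind a driver performs when a C structure stores a Rust-owned `void *`. The
// function name is hypothetical, and it uses the infallible `Box::new` purely for brevity; the
// point is how the safety requirements of `borrow` and `from_foreign` line up.
#[allow(dead_code)]
fn box_foreign_ownable_round_trip_sketch() {
    let ptr = Box::new(42i32).into_foreign();

    // SAFETY: `ptr` was returned by `into_foreign` above and no matching `from_foreign` has been
    // called on it yet.
    let borrowed: &i32 = unsafe { <Box<i32> as ForeignOwnable>::borrow(ptr) };
    assert_eq!(*borrowed, 42);

    // SAFETY: `ptr` was returned by `into_foreign` above, no matching `from_foreign` has been
    // called yet, and the borrow above is no longer used.
    let owned = unsafe { <Box<i32> as ForeignOwnable>::from_foreign(ptr) };
    assert_eq!(*owned, 42);
}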

impl<T: 'static> ForeignOwnable for Pin<Box<T>> {
    type Borrowed<'a> = Pin<&'a T>;

    fn into_foreign(self) -> *const core::ffi::c_void {
        // SAFETY: We are still treating the box as pinned.
        Box::into_raw(unsafe { Pin::into_inner_unchecked(self) }) as _
    }

    unsafe fn borrow<'a>(ptr: *const core::ffi::c_void) -> Pin<&'a T> {
        // SAFETY: The safety requirements for this function ensure that the object is still alive,
        // so it is safe to dereference the raw pointer.
        // The safety requirements of `from_foreign` also ensure that the object remains alive for
        // the lifetime of the returned value.
        let r = unsafe { &*ptr.cast() };

        // SAFETY: This pointer originates from a `Pin<Box<T>>`.
        unsafe { Pin::new_unchecked(r) }
    }

    unsafe fn from_foreign(ptr: *const core::ffi::c_void) -> Self {
        // SAFETY: The safety requirements of this function ensure that `ptr` comes from a previous
        // call to `Self::into_foreign`.
        unsafe { Pin::new_unchecked(Box::from_raw(ptr as _)) }
    }
}

impl ForeignOwnable for () {
    type Borrowed<'a> = ();

    fn into_foreign(self) -> *const core::ffi::c_void {
        core::ptr::NonNull::dangling().as_ptr()
    }

    unsafe fn borrow<'a>(_: *const core::ffi::c_void) -> Self::Borrowed<'a> {}

    unsafe fn from_foreign(_: *const core::ffi::c_void) -> Self {}
}

/// Runs a cleanup function/closure when dropped.
///
/// The [`ScopeGuard::dismiss`] function prevents the cleanup function from running.
///
/// # Examples
///
/// In the example below, we have multiple exit paths and we want to log regardless of which one is
/// taken:
///
/// ```
/// # use kernel::types::ScopeGuard;
/// fn example1(arg: bool) {
///     let _log = ScopeGuard::new(|| pr_info!("example1 completed\n"));
///
///     if arg {
///         return;
///     }
///
///     pr_info!("Do something...\n");
/// }
///
/// # example1(false);
/// # example1(true);
/// ```
///
/// In the example below, we want to log the same message on all early exits but a different one on
/// the main exit path:
///
/// ```
/// # use kernel::types::ScopeGuard;
/// fn example2(arg: bool) {
///     let log = ScopeGuard::new(|| pr_info!("example2 returned early\n"));
///
///     if arg {
///         return;
///     }
///
///     // (Other early returns...)
///
///     log.dismiss();
///     pr_info!("example2 no early return\n");
/// }
///
/// # example2(false);
/// # example2(true);
/// ```
///
/// In the example below, we need a mutable object (the vector) to be accessible within the log
/// function, so we wrap it in the [`ScopeGuard`]:
///
/// ```
/// # use kernel::types::ScopeGuard;
/// fn example3(arg: bool) -> Result {
///     let mut vec =
///         ScopeGuard::new_with_data(Vec::new(), |v| pr_info!("vec had {} elements\n", v.len()));
///
///     vec.push(10u8, GFP_KERNEL)?;
///     if arg {
///         return Ok(());
///     }
///     vec.push(20u8, GFP_KERNEL)?;
///     Ok(())
/// }
///
/// # assert_eq!(example3(false), Ok(()));
/// # assert_eq!(example3(true), Ok(()));
/// ```
///
/// # Invariants
///
/// The value stored in the struct is nearly always `Some(_)`, except between
/// [`ScopeGuard::dismiss`] and [`ScopeGuard::drop`]: in this case, it will be `None` as the value
/// will have been returned to the caller. Since [`ScopeGuard::dismiss`] consumes the guard,
/// callers won't be able to use it anymore.
pub struct ScopeGuard<T, F: FnOnce(T)>(Option<(T, F)>);

impl<T, F: FnOnce(T)> ScopeGuard<T, F> {
    /// Creates a new guarded object wrapping the given data and with the given cleanup function.
    pub fn new_with_data(data: T, cleanup_func: F) -> Self {
        // INVARIANT: The struct is being initialised with `Some(_)`.
        Self(Some((data, cleanup_func)))
    }

    /// Prevents the cleanup function from running and returns the guarded data.
    pub fn dismiss(mut self) -> T {
        // INVARIANT: This is the exception case in the invariant; it is not visible to callers
        // because this function consumes `self`.
        self.0.take().unwrap().0
    }
}

impl ScopeGuard<(), fn(())> {
    /// Creates a new guarded object with the given cleanup function.
    pub fn new(cleanup: impl FnOnce()) -> ScopeGuard<(), impl FnOnce(())> {
        ScopeGuard::new_with_data((), move |_| cleanup())
    }
}

impl<T, F: FnOnce(T)> Deref for ScopeGuard<T, F> {
    type Target = T;

    fn deref(&self) -> &T {
        // The type invariants guarantee that `unwrap` will succeed.
        &self.0.as_ref().unwrap().0
    }
}

impl<T, F: FnOnce(T)> DerefMut for ScopeGuard<T, F> {
    fn deref_mut(&mut self) -> &mut T {
        // The type invariants guarantee that `unwrap` will succeed.
        &mut self.0.as_mut().unwrap().0
    }
}

impl<T, F: FnOnce(T)> Drop for ScopeGuard<T, F> {
    fn drop(&mut self) {
        // Run the cleanup function if one is still present.
        if let Some((data, cleanup)) = self.0.take() {
            cleanup(data);
        }
    }
}
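
// Illustrative sketch (not part of the upstream file): the "undo unless we succeed" pattern that
// `ScopeGuard` enables. The guard owns the rollback data; on the success path, `dismiss()` both
// cancels the cleanup and hands the guarded data back to the caller. The function name and error
// type are hypothetical.
#[allow(dead_code)]
fn scope_guard_dismiss_sketch(fail: bool) -> Result<u32, ()> {
    let guard = ScopeGuard::new_with_data(42u32, |v| {
        // This cleanup only runs if the guard is dropped without `dismiss()`, i.e. on the early
        // error return below.
        let _ = v;
    });

    if fail {
        // The guard goes out of scope here, so the cleanup closure runs.
        return Err(());
    }

    // Success: prevent the cleanup from running and recover the guarded value.
    Ok(guard.dismiss())
}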

/// Stores an opaque value.
///
/// This is meant to be used with FFI objects that are never interpreted by Rust code.
#[repr(transparent)]
pub struct Opaque<T> {
    value: UnsafeCell<MaybeUninit<T>>,
    _pin: PhantomPinned,
}

impl<T> Opaque<T> {
    /// Creates a new opaque value.
    pub const fn new(value: T) -> Self {
        Self {
            value: UnsafeCell::new(MaybeUninit::new(value)),
            _pin: PhantomPinned,
        }
    }

    /// Creates an uninitialised value.
    pub const fn uninit() -> Self {
        Self {
            value: UnsafeCell::new(MaybeUninit::uninit()),
            _pin: PhantomPinned,
        }
    }

    /// Creates a pin-initializer from the given initializer closure.
    ///
    /// The returned initializer calls the given closure with the pointer to the inner `T` of this
    /// `Opaque`. Since this memory is uninitialized, the closure is not allowed to read from it.
    ///
    /// This function is safe, because the `T` inside of an `Opaque` is allowed to be
    /// uninitialized. Additionally, access to the inner `T` requires `unsafe`, so the caller needs
    /// to verify at that point that the inner value is valid.
    pub fn ffi_init(init_func: impl FnOnce(*mut T)) -> impl PinInit<Self> {
        // SAFETY: We contain a `MaybeUninit`, so it is OK for the `init_func` to not fully
        // initialize the `T`.
        unsafe {
            init::pin_init_from_closure::<_, ::core::convert::Infallible>(move |slot| {
                init_func(Self::raw_get(slot));
                Ok(())
            })
        }
    }

    /// Returns a raw pointer to the opaque data.
    pub const fn get(&self) -> *mut T {
        UnsafeCell::get(&self.value).cast::<T>()
    }

    /// Gets the value behind `this`.
    ///
    /// This function is useful to get access to the value without creating intermediate
    /// references.
    pub const fn raw_get(this: *const Self) -> *mut T {
        UnsafeCell::raw_get(this.cast::<UnsafeCell<MaybeUninit<T>>>()).cast::<T>()
    }
}
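
// Illustrative sketch (not part of the upstream file): embedding a C object in `Opaque<T>` and
// initializing it in place through `ffi_init`. `Foo` and `foo_init()` are hypothetical stand-ins
// for a generated binding and its C initializer.
#[allow(dead_code)]
mod opaque_ffi_init_sketch {
    use super::Opaque;
    use crate::init::PinInit;

    /// Hypothetical C struct, as it might appear in generated bindings.
    #[repr(C)]
    struct Foo {
        state: u32,
    }

    /// Hypothetical C initializer; a real one would come from `bindings`.
    unsafe extern "C" fn foo_init(ptr: *mut Foo) {
        // SAFETY: The caller passes a pointer to writable storage for a `Foo`.
        unsafe { (*ptr).state = 0 };
    }

    /// Returns a pin-initializer that lets the C side write the `Foo` in place, so the value is
    /// never moved after `foo_init()` has seen its address.
    fn foo_initializer() -> impl PinInit<Opaque<Foo>> {
        Opaque::ffi_init(|slot: *mut Foo| {
            // SAFETY: `ffi_init` calls us with a valid pointer to uninitialized memory large
            // enough for a `Foo`, which is exactly what `foo_init()` expects.
            unsafe { foo_init(slot) };
        })
    }
}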

/// Types that are _always_ reference counted.
///
/// It allows such types to define their own custom ref increment and decrement functions.
/// Additionally, it allows users to convert from a shared reference `&T` to an owned reference
/// [`ARef<T>`].
///
/// This is usually implemented by wrappers to existing structures on the C side of the code. For
/// Rust code, the recommendation is to use [`Arc`](crate::sync::Arc) to create reference-counted
/// instances of a type.
///
/// # Safety
///
/// Implementers must ensure that increments to the reference count keep the object alive in memory
/// at least until matching decrements are performed.
///
/// Implementers must also ensure that all instances are reference-counted. (Otherwise they
/// won't be able to honour the requirement that [`AlwaysRefCounted::inc_ref`] keep the object
/// alive.)
pub unsafe trait AlwaysRefCounted {
    /// Increments the reference count on the object.
    fn inc_ref(&self);

    /// Decrements the reference count on the object.
    ///
    /// Frees the object when the count reaches zero.
    ///
    /// # Safety
    ///
    /// Callers must ensure that there was a previous matching increment to the reference count,
    /// and that the object is no longer used after its reference count is decremented (as it may
    /// result in the object being freed), unless the caller owns another increment on the refcount
    /// (e.g., it calls [`AlwaysRefCounted::inc_ref`] twice, then calls
    /// [`AlwaysRefCounted::dec_ref`] once).
    unsafe fn dec_ref(obj: NonNull<Self>);
}
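
// Illustrative sketch (not part of the upstream file): what an implementation of
// `AlwaysRefCounted` typically looks like for a wrapper around a refcounted C object. `FooObj`,
// `foo_get()` and `foo_put()` are hypothetical stand-ins for a generated binding and its C
// refcounting helpers.
#[allow(dead_code)]
mod always_ref_counted_sketch {
    use super::{AlwaysRefCounted, Opaque};
    use core::ptr::NonNull;

    /// Hypothetical C object with an embedded reference count.
    #[repr(C)]
    struct FooObj {
        refcount: i32,
        data: u64,
    }

    /// Hypothetical C helpers; real ones would come from `bindings`. `foo_put()` is assumed to
    /// free the object once the count drops to zero.
    unsafe extern "C" fn foo_get(_obj: *mut FooObj) {}
    unsafe extern "C" fn foo_put(_obj: *mut FooObj) {}

    /// Rust wrapper with the same layout as the C object.
    #[repr(transparent)]
    struct Foo(Opaque<FooObj>);

    // SAFETY: Under the assumptions above, `foo_get()` keeps the object alive at least until a
    // matching `foo_put()`, and instances of `Foo` only exist for objects that are
    // reference-counted by the C side.
    unsafe impl AlwaysRefCounted for Foo {
        fn inc_ref(&self) {
            // SAFETY: The existence of `&self` means the object is currently alive.
            unsafe { foo_get(self.0.get()) };
        }

        unsafe fn dec_ref(obj: NonNull<Self>) {
            // SAFETY: The caller guarantees a previous matching increment, so the object is still
            // alive and this decrement is balanced. `Foo` is transparent over `Opaque<FooObj>`,
            // so the cast preserves the object's address.
            unsafe { foo_put(Opaque::raw_get(obj.as_ptr().cast::<Opaque<FooObj>>())) };
        }
    }
}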

/// An owned reference to an always-reference-counted object.
///
/// The object's reference count is automatically decremented when an instance of [`ARef`] is
/// dropped. It is also automatically incremented when a new instance is created via
/// [`ARef::clone`].
///
/// # Invariants
///
/// The pointer stored in `ptr` is non-null and valid for the lifetime of the [`ARef`] instance. In
/// particular, the [`ARef`] instance owns an increment on the underlying object's reference count.
pub struct ARef<T: AlwaysRefCounted> {
    ptr: NonNull<T>,
    _p: PhantomData<T>,
}

// SAFETY: It is safe to send `ARef<T>` to another thread when the underlying `T` is `Sync` because
// it effectively means sharing `&T` (which is safe because `T` is `Sync`); additionally, it needs
// `T` to be `Send` because any thread that has an `ARef<T>` may ultimately access `T` using a
// mutable reference, for example, when the reference count reaches zero and `T` is dropped.
unsafe impl<T: AlwaysRefCounted + Sync + Send> Send for ARef<T> {}

// SAFETY: It is safe to send `&ARef<T>` to another thread when the underlying `T` is `Sync`
// because it effectively means sharing `&T` (which is safe because `T` is `Sync`); additionally,
// it needs `T` to be `Send` because any thread that has a `&ARef<T>` may clone it and get an
// `ARef<T>` on that thread, so the thread may ultimately access `T` using a mutable reference, for
// example, when the reference count reaches zero and `T` is dropped.
unsafe impl<T: AlwaysRefCounted + Sync + Send> Sync for ARef<T> {}

impl<T: AlwaysRefCounted> ARef<T> {
    /// Creates a new instance of [`ARef`].
    ///
    /// It takes over an increment of the reference count on the underlying object.
    ///
    /// # Safety
    ///
    /// Callers must ensure that the reference count was incremented at least once, and that they
    /// are properly relinquishing one increment. That is, if there is only one increment, callers
    /// must not use the underlying object anymore -- it is only safe to do so via the newly
    /// created [`ARef`].
    pub unsafe fn from_raw(ptr: NonNull<T>) -> Self {
        // INVARIANT: The safety requirements guarantee that the new instance now owns the
        // increment on the refcount.
        Self {
            ptr,
            _p: PhantomData,
        }
    }

    /// Consumes the `ARef`, returning a raw pointer.
    ///
    /// This function does not change the refcount. After calling this function, the caller is
    /// responsible for the refcount previously managed by the `ARef`.
    ///
    /// # Examples
    ///
    /// ```
    /// use core::ptr::NonNull;
    /// use kernel::types::{ARef, AlwaysRefCounted};
    ///
    /// struct Empty {}
    ///
    /// unsafe impl AlwaysRefCounted for Empty {
    ///     fn inc_ref(&self) {}
    ///     unsafe fn dec_ref(_obj: NonNull<Self>) {}
    /// }
    ///
    /// let mut data = Empty {};
    /// let ptr = NonNull::<Empty>::new(&mut data as *mut _).unwrap();
    /// let data_ref: ARef<Empty> = unsafe { ARef::from_raw(ptr) };
    /// let raw_ptr: NonNull<Empty> = ARef::into_raw(data_ref);
    ///
    /// assert_eq!(ptr, raw_ptr);
    /// ```
    pub fn into_raw(me: Self) -> NonNull<T> {
        ManuallyDrop::new(me).ptr
    }
}

impl<T: AlwaysRefCounted> Clone for ARef<T> {
    fn clone(&self) -> Self {
        self.inc_ref();
        // SAFETY: We just incremented the refcount above.
        unsafe { Self::from_raw(self.ptr) }
    }
}

impl<T: AlwaysRefCounted> Deref for ARef<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        // SAFETY: The type invariants guarantee that the object is valid.
        unsafe { self.ptr.as_ref() }
    }
}

impl<T: AlwaysRefCounted> From<&T> for ARef<T> {
    fn from(b: &T) -> Self {
        b.inc_ref();
        // SAFETY: We just incremented the refcount above.
        unsafe { Self::from_raw(NonNull::from(b)) }
    }
}

impl<T: AlwaysRefCounted> Drop for ARef<T> {
    fn drop(&mut self) {
        // SAFETY: The type invariants guarantee that the `ARef` owns the reference we're about to
        // decrement.
        unsafe { T::dec_ref(self.ptr) };
    }
}
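
// Illustrative sketch (not part of the upstream file): how `ARef` automates the refcounting for
// any `T: AlwaysRefCounted`. Converting a borrowed `&T` into an owned `ARef<T>` takes one
// increment, cloning takes another, and each drop releases one. The function name is
// hypothetical.
#[allow(dead_code)]
fn aref_lifecycle_sketch<T: AlwaysRefCounted>(obj: &T) {
    // `From<&T>` calls `inc_ref`, so `first` owns its own increment.
    let first: ARef<T> = ARef::from(obj);

    // `Clone` calls `inc_ref` again, so `second` owns a separate increment.
    let second = first.clone();

    // Each drop calls `dec_ref` exactly once; the caller's own reference behind `obj` is not
    // affected.
    core::mem::drop(first);
    core::mem::drop(second);
}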

/// A sum type that always holds either a value of type `L` or `R`.
pub enum Either<L, R> {
    /// Constructs an instance of [`Either`] containing a value of type `L`.
    Left(L),

    /// Constructs an instance of [`Either`] containing a value of type `R`.
    Right(R),
}
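
// Illustrative sketch (not part of the upstream file): `Either` as a lightweight sum type for
// returning one of two unrelated types from the same function. The function name is hypothetical.
#[allow(dead_code)]
fn either_sketch(use_left: bool) -> Either<u32, &'static str> {
    if use_left {
        Either::Left(7)
    } else {
        Either::Right("fallback")
    }
}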

/// Types for which any bit pattern is valid.
///
/// Not all types are valid for all bit patterns. For example, a `bool` must be either zero or one,
/// so reading arbitrary bytes into something that contains a `bool` is not okay.
///
/// It's okay for the type to have padding, as initializing those bytes has no effect.
///
/// # Safety
///
/// All bit-patterns must be valid for this type. This type must not have interior mutability.
pub unsafe trait FromBytes {}

// SAFETY: All bit patterns are acceptable values of the types below.
unsafe impl FromBytes for u8 {}
unsafe impl FromBytes for u16 {}
unsafe impl FromBytes for u32 {}
unsafe impl FromBytes for u64 {}
unsafe impl FromBytes for usize {}
unsafe impl FromBytes for i8 {}
unsafe impl FromBytes for i16 {}
unsafe impl FromBytes for i32 {}
unsafe impl FromBytes for i64 {}
unsafe impl FromBytes for isize {}
// SAFETY: If all bit patterns are acceptable for individual values in an array, then all bit
// patterns are also acceptable for arrays of that type.
unsafe impl<T: FromBytes> FromBytes for [T] {}
unsafe impl<T: FromBytes, const N: usize> FromBytes for [T; N] {}
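
// Illustrative sketch (not part of the upstream file): the kind of helper `FromBytes` enables.
// Because every bit pattern is a valid `T`, bytes coming from an untrusted source can be
// reinterpreted as a `T` without further validation. The helper name is hypothetical; in the
// kernel, the real users are interfaces that read userspace memory.
#[allow(dead_code)]
fn read_from_bytes_sketch<T: FromBytes>(bytes: &[u8]) -> Option<T> {
    if bytes.len() < core::mem::size_of::<T>() {
        return None;
    }

    let mut out = MaybeUninit::<T>::uninit();
    // SAFETY: `bytes` is valid for reads of `size_of::<T>()` bytes (checked above), the
    // destination is valid for writes of that many bytes, and the two regions cannot overlap
    // because `out` is a fresh local.
    unsafe {
        core::ptr::copy_nonoverlapping(
            bytes.as_ptr(),
            out.as_mut_ptr().cast::<u8>(),
            core::mem::size_of::<T>(),
        )
    };

    // SAFETY: All `size_of::<T>()` bytes of `out` were just initialized, and any fully
    // initialized bit pattern is a valid `T` because `T: FromBytes`.
    Some(unsafe { out.assume_init() })
}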

/// Types that can be viewed as an immutable slice of initialized bytes.
///
/// If a struct implements this trait, then it is okay to copy it byte-for-byte to userspace. This
/// means that it should not have any padding, as padding bytes are uninitialized. Reading
/// uninitialized memory is not just undefined behavior. It may even lead to leaking sensitive
/// information on the stack to userspace.
///
/// The struct should also not hold kernel pointers, as kernel pointer addresses are also considered
/// sensitive. However, leaking kernel pointers is not considered undefined behavior by Rust, so
/// this is a correctness requirement, but not a safety requirement.
///
/// # Safety
///
/// Values of this type may not contain any uninitialized bytes. This type must not have interior
/// mutability.
pub unsafe trait AsBytes {}

// SAFETY: Instances of the following types have no uninitialized portions.
unsafe impl AsBytes for u8 {}
unsafe impl AsBytes for u16 {}
unsafe impl AsBytes for u32 {}
unsafe impl AsBytes for u64 {}
unsafe impl AsBytes for usize {}
unsafe impl AsBytes for i8 {}
unsafe impl AsBytes for i16 {}
unsafe impl AsBytes for i32 {}
unsafe impl AsBytes for i64 {}
unsafe impl AsBytes for isize {}
unsafe impl AsBytes for bool {}
unsafe impl AsBytes for char {}
unsafe impl AsBytes for str {}
// SAFETY: If individual values in an array have no uninitialized portions, then the array itself
// does not have any uninitialized portions either.
unsafe impl<T: AsBytes> AsBytes for [T] {}
unsafe impl<T: AsBytes, const N: usize> AsBytes for [T; N] {}
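
// Illustrative sketch (not part of the upstream file): the dual helper that `AsBytes` enables.
// Because an `AsBytes` value contains no uninitialized bytes, it can be exposed as a plain byte
// slice, e.g. right before copying it out to userspace. The helper name is hypothetical.
#[allow(dead_code)]
fn as_byte_slice_sketch<T: AsBytes>(value: &T) -> &[u8] {
    // SAFETY: `value` is valid for reads of `size_of::<T>()` bytes, `T: AsBytes` guarantees all
    // of those bytes are initialized and that `T` has no interior mutability, and the returned
    // slice borrows `value`, so the memory stays live and unmodified for the slice's lifetime.
    unsafe {
        core::slice::from_raw_parts((value as *const T).cast::<u8>(), core::mem::size_of::<T>())
    }
}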