diff --git a/src/alloc/boxed.rs b/src/alloc/boxed.rs
new file mode 100644
index 000000000..c9f1f7c80
--- /dev/null
+++ b/src/alloc/boxed.rs
@@ -0,0 +1,715 @@
+// SPDX-License-Identifier: MIT OR Apache-2.0
+//
+// Copyright (C) 2023 SUSE
+//
+// Authors: Carlos López
+
+//! The `SvsmBox` type for heap allocation.
+//!
+//! [`SvsmBox`], casually referred to as a 'box', provides the simplest form of
+//! heap allocation in Rust. Boxes provide ownership for this allocation, and
+//! drop their contents when they go out of scope. Boxes also ensure that they
+//! never allocate more than `isize::MAX` bytes.
+//!
+//! This is a downstream version of `Box` with a stabilized allocator API,
+//! supporting fallible allocations exclusively.
+
+use core::alloc::Layout;
+use core::borrow::{Borrow, BorrowMut};
+use core::cmp::Ordering;
+use core::fmt;
+use core::mem::{size_of, ManuallyDrop, MaybeUninit};
+use core::ops::{Deref, DerefMut};
+use core::pin::Pin;
+use core::ptr::{self, NonNull};
+
+use super::unique::SvsmUnique as Unique;
+use super::{SvsmAlloc as Allocator, SvsmAllocError};
+
+/// A pointer type that uniquely owns a heap allocation of type `T`, generic
+/// over any given allocator, and supporting fallible allocations.
+///
+/// This is a downstream version of `Box` with a stabilized allocator API,
+/// supporting fallible allocations exclusively.
+pub struct SvsmBox<T, A: Allocator>(Unique<T>, A);
+
+impl<T, A: Allocator> SvsmBox<T, A> {
+    /// Allocates memory in the given allocator then places `x` into it,
+    /// returning an error if the allocation fails.
+    ///
+    /// This doesn't actually allocate if `T` is zero-sized.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAllocError};
+    /// use std::alloc::System;
+    ///
+    /// let five = SvsmBox::try_new_in(5, System)?;
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```
+    #[inline]
+    pub fn try_new_in(x: T, alloc: A) -> Result<Self, SvsmAllocError> {
+        let mut boxed = Self::try_new_uninit_in(alloc)?;
+        unsafe {
+            boxed.as_mut_ptr().write(x);
+            Ok(boxed.assume_init())
+        }
+    }
+
+    /// Constructs a new `SvsmBox` with uninitialized contents in the given
+    /// allocator, returning an error if the allocation fails.
+    ///
+    /// This doesn't actually allocate if `T` is zero-sized.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAllocError};
+    /// use std::alloc::System;
+    ///
+    /// let mut five = SvsmBox::<u32, _>::try_new_uninit_in(System)?;
+    /// let five = unsafe {
+    ///     // Deferred initialization:
+    ///     five.as_mut_ptr().write(5);
+    ///     five.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*five, 5);
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```
+    pub fn try_new_uninit_in(alloc: A) -> Result<SvsmBox<MaybeUninit<T>, A>, SvsmAllocError> {
+        let ptr = if size_of::<T>() == 0 {
+            NonNull::dangling()
+        } else {
+            let layout = Layout::new::<MaybeUninit<T>>();
+            alloc.allocate(layout)?.cast()
+        };
+        unsafe { Ok(SvsmBox::from_raw_in(ptr.as_ptr(), alloc)) }
+    }
+
+    /// Constructs a new `SvsmBox` with uninitialized contents, with the memory
+    /// being filled with `0` bytes in the provided allocator.
+    ///
+    /// See [`MaybeUninit::zeroed`] for examples of correct and incorrect usage
+    /// of this method.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAllocError};
+    /// use std::alloc::System;
+    ///
+    /// let zero = SvsmBox::<u32, _>::try_new_zeroed_in(System)?;
+    /// let zero = unsafe { zero.assume_init() };
+    ///
+    /// assert_eq!(*zero, 0);
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```
+    pub fn try_new_zeroed_in(alloc: A) -> Result<SvsmBox<MaybeUninit<T>, A>, SvsmAllocError> {
+        let ptr = if size_of::<T>() == 0 {
+            NonNull::dangling()
+        } else {
+            let layout = Layout::new::<MaybeUninit<T>>();
+            alloc.allocate_zeroed(layout)?.cast()
+        };
+        unsafe { Ok(SvsmBox::from_raw_in(ptr.as_ptr(), alloc)) }
+    }
+
+    /// Consumes the `SvsmBox`, returning the wrapped value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAllocError};
+    /// use std::alloc::System;
+    ///
+    /// let c = SvsmBox::try_new_in(5, System)?;
+    ///
+    /// assert_eq!(SvsmBox::into_inner(c), 5);
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```
+    pub fn into_inner(self) -> T {
+        // Inhibit the `SvsmBox` destructor: the value is moved out below, so
+        // letting `drop()` run `drop_in_place()` on it as well would be a
+        // double drop.
+        let this = ManuallyDrop::new(self);
+        // SAFETY: the box owns a valid, initialized `T`.
+        let value = unsafe { this.0.as_ptr().read() };
+        // SAFETY: the allocator is moved out exactly once, and `this` is
+        // never dropped, so it is not read twice.
+        let alloc = unsafe { ptr::read(&this.1) };
+        let layout = Layout::new::<T>();
+        if layout.size() != 0 {
+            // SAFETY: the memory was allocated by this allocator with this
+            // layout, and its contents have already been moved out.
+            unsafe { alloc.deallocate(From::from(this.0.cast()), layout) };
+        }
+        value
+    }
+
+    /// Constructs a new `Pin<SvsmBox<T, A>>`. If `T` does not implement
+    /// [`Unpin`], then `x` will be pinned in memory and unable to be
+    /// moved.
+    ///
+    /// Constructing and pinning of the `SvsmBox` can also be done in two
+    /// steps: `SvsmBox::try_pin_in(x, alloc)` does the same as
+    /// [SvsmBox::into_pin]\([SvsmBox::try_new_in]\(x, alloc)?).
+    /// Consider using [`into_pin`](SvsmBox::into_pin) if you already have a
+    /// `SvsmBox<T, A>`, or if you want to construct a (pinned) `SvsmBox` in
+    /// a different way than with [`SvsmBox::try_new_in`].
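+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch of pinning a value, assuming the hosted `System`
+    /// allocator (which implements `SvsmAlloc` in this module for
+    /// non-`target_os = "none"` builds):
+    ///
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAllocError};
+    /// use std::alloc::System;
+    ///
+    /// let five = SvsmBox::try_pin_in(5, System)?;
+    /// assert_eq!(*five, 5);
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```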
+    pub fn try_pin_in(x: T, alloc: A) -> Result<Pin<Self>, SvsmAllocError>
+    where
+        A: 'static + Allocator,
+    {
+        let boxed = Self::try_new_in(x, alloc)?;
+        Ok(Self::into_pin(boxed))
+    }
+
+    /// Constructs a box from a raw pointer in the given allocator.
+    ///
+    /// After calling this function, the raw pointer is owned by the
+    /// resulting `SvsmBox`. Specifically, the `SvsmBox` destructor will call
+    /// the destructor of `T` and free the allocated memory. For this
+    /// to be safe, the memory must have been allocated in accordance
+    /// with the memory layout used by `SvsmBox`.
+    ///
+    /// # Safety
+    ///
+    /// This function is unsafe because improper use may lead to
+    /// memory problems. For example, a double-free may occur if the
+    /// function is called twice on the same raw pointer.
+    ///
+    /// # Examples
+    ///
+    /// Recreate a `SvsmBox` which was previously converted to a raw pointer
+    /// using [`SvsmBox::into_raw_with_allocator`]:
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAllocError};
+    /// use std::alloc::System;
+    ///
+    /// let x = SvsmBox::try_new_in(5, System)?;
+    /// let (ptr, alloc) = SvsmBox::into_raw_with_allocator(x);
+    /// let x = unsafe { SvsmBox::from_raw_in(ptr, alloc) };
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```
+    /// Manually create a `SvsmBox` from scratch by using the system allocator:
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAlloc, SvsmAllocError};
+    /// use std::alloc::{Layout, System};
+    ///
+    /// unsafe {
+    ///     let ptr = System.allocate(Layout::new::<i32>())?.as_ptr() as *mut i32;
+    ///     // In general .write is required to avoid attempting to destruct
+    ///     // the (uninitialized) previous contents of `ptr`, though for this
+    ///     // simple example `*ptr = 5` would have worked as well.
+    ///     ptr.write(5);
+    ///     let x = SvsmBox::from_raw_in(ptr, System);
+    /// }
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```
+    #[inline]
+    pub unsafe fn from_raw_in(raw: *mut T, alloc: A) -> Self {
+        Self(unsafe { Unique::new_unchecked(raw) }, alloc)
+    }
+
+    /// Consumes the `SvsmBox`, returning a wrapped raw pointer.
+    ///
+    /// The pointer will be properly aligned and non-null.
+    ///
+    /// After calling this function, the caller is responsible for the
+    /// memory previously managed by the `SvsmBox`. In particular, the
+    /// caller should properly destroy `T` and release the memory, taking
+    /// into account the memory layout used by `SvsmBox`. The easiest way to
+    /// do this is to convert the raw pointer back into a `SvsmBox` with the
+    /// [`SvsmBox::from_raw_in`] function, allowing the `SvsmBox` destructor
+    /// to perform the cleanup.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `SvsmBox::into_raw(b)` instead of `b.into_raw()`. This
+    /// is so that there is no conflict with a method on the inner type.
+    ///
+    /// # Examples
+    /// Converting the raw pointer back into a `SvsmBox` with
+    /// [`SvsmBox::from_raw_in`] for automatic cleanup:
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAllocError};
+    /// use std::alloc::System;
+    ///
+    /// let x = SvsmBox::try_new_in(String::from("Hello"), System)?;
+    /// let ptr = SvsmBox::into_raw(x);
+    /// let x = unsafe { SvsmBox::from_raw_in(ptr, System) };
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```
+    /// Manual cleanup by explicitly running the destructor and deallocating
+    /// the memory:
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAlloc, SvsmAllocError};
+    /// use std::alloc::{Layout, System};
+    /// use std::ptr::{self, NonNull};
+    ///
+    /// let x = SvsmBox::try_new_in(String::from("Hello"), System)?;
+    /// let p = SvsmBox::into_raw(x);
+    /// unsafe {
+    ///     ptr::drop_in_place(p);
+    ///     let non_null = NonNull::new_unchecked(p);
+    ///     System.deallocate(non_null.cast(), Layout::new::<String>());
+    /// }
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```
+    #[inline]
+    pub fn into_raw(b: Self) -> *mut T {
+        Self::into_raw_with_allocator(b).0
+    }
+
+    /// Consumes the `SvsmBox`, returning a wrapped raw pointer and the
+    /// allocator.
+    ///
+    /// The pointer will be properly aligned and non-null.
+    ///
+    /// After calling this function, the caller is responsible for the
+    /// memory previously managed by the `SvsmBox`. In particular, the
+    /// caller should properly destroy `T` and release the memory, taking
+    /// into account the memory layout used by `SvsmBox`. The easiest way to
+    /// do this is to convert the raw pointer back into a `SvsmBox` with the
+    /// [`SvsmBox::from_raw_in`] function, allowing the `SvsmBox` destructor
+    /// to perform the cleanup.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `SvsmBox::into_raw_with_allocator(b)` instead of
+    /// `b.into_raw_with_allocator()`. This is so that there is no conflict
+    /// with a method on the inner type.
+    ///
+    /// # Examples
+    /// Converting the raw pointer back into a `SvsmBox` with
+    /// [`SvsmBox::from_raw_in`] for automatic cleanup:
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAllocError};
+    /// use std::alloc::System;
+    ///
+    /// let x = SvsmBox::try_new_in(String::from("Hello"), System)?;
+    /// let (ptr, alloc) = SvsmBox::into_raw_with_allocator(x);
+    /// let x = unsafe { SvsmBox::from_raw_in(ptr, alloc) };
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```
+    /// Manual cleanup by explicitly running the destructor and deallocating
+    /// the memory:
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAlloc, SvsmAllocError};
+    /// use std::alloc::{Layout, System};
+    /// use std::ptr::{self, NonNull};
+    ///
+    /// let x = SvsmBox::try_new_in(String::from("Hello"), System)?;
+    /// let (ptr, alloc) = SvsmBox::into_raw_with_allocator(x);
+    /// unsafe {
+    ///     ptr::drop_in_place(ptr);
+    ///     let non_null = NonNull::new_unchecked(ptr);
+    ///     alloc.deallocate(non_null.cast(), Layout::new::<String>());
+    /// }
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```
+    #[inline]
+    pub fn into_raw_with_allocator(b: Self) -> (*mut T, A) {
+        let (leaked, alloc) = SvsmBox::into_unique(b);
+        (leaked.as_ptr(), alloc)
+    }
+
+    #[inline]
+    fn into_unique(b: Self) -> (Unique<T>, A) {
+        // SvsmBox is recognized as a "unique pointer" by Stacked Borrows, but
+        // internally it is a raw pointer for the type system. Turning it
+        // directly into a raw pointer would not be recognized as "releasing"
+        // the unique pointer to permit aliased raw accesses, so all raw
+        // pointer methods have to go through `SvsmBox::leak`. Turning *that*
+        // into a raw pointer behaves correctly.
+        let alloc = unsafe { ptr::read(&b.1) };
+        (Unique::from(Self::leak(b)), alloc)
+    }
+
+    /// Returns a reference to the underlying allocator.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `SvsmBox::allocator(&b)` instead of `b.allocator()`. This
+    /// is so that there is no conflict with a method on the inner type.
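+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch, assuming the hosted `System` allocator:
+    ///
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAllocError};
+    /// use std::alloc::System;
+    ///
+    /// let b = SvsmBox::try_new_in(5, System)?;
+    /// let _alloc: &System = SvsmBox::allocator(&b);
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```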
+    #[inline]
+    pub const fn allocator(b: &Self) -> &A {
+        &b.1
+    }
+
+    /// Consumes and leaks the `SvsmBox`, returning a mutable reference,
+    /// `&'a mut T`. Note that the type `T` must outlive the chosen lifetime
+    /// `'a`. If the type has only static references, or none at all, then this
+    /// may be chosen to be `'static`.
+    ///
+    /// This function is mainly useful for data that lives for the remainder of
+    /// the program's life. Dropping the returned reference will cause a memory
+    /// leak. If this is not acceptable, the reference should first be wrapped
+    /// with the [`SvsmBox::from_raw_in`] function producing a `SvsmBox`. This
+    /// `SvsmBox` can then be dropped, which will properly destroy `T` and
+    /// release the allocated memory.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `SvsmBox::leak(b)` instead of `b.leak()`. This
+    /// is so that there is no conflict with a method on the inner type.
+    ///
+    /// # Examples
+    ///
+    /// Simple usage:
+    ///
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAllocError};
+    /// use std::alloc::System;
+    ///
+    /// let x = SvsmBox::try_new_in(41, System)?;
+    /// let static_ref: &'static mut usize = SvsmBox::leak(x);
+    /// *static_ref += 1;
+    /// assert_eq!(*static_ref, 42);
+    ///
+    /// // Deallocate
+    /// let x = unsafe { SvsmBox::from_raw_in(static_ref, System) };
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```
+    #[inline]
+    pub fn leak<'a>(b: Self) -> &'a mut T
+    where
+        A: 'a,
+    {
+        unsafe { &mut *ManuallyDrop::new(b).0.as_ptr() }
+    }
+
+    /// Converts a `SvsmBox<T, A>` into a `Pin<SvsmBox<T, A>>`. If `T` does
+    /// not implement [`Unpin`], then `*boxed` will be pinned in memory and
+    /// unable to be moved.
+    ///
+    /// This conversion does not allocate on the heap and happens in place.
+    ///
+    /// This is also available via [`From`].
+    ///
+    /// Constructing and pinning a `SvsmBox` with
+    /// SvsmBox::into_pin([SvsmBox::try_new_in]\(x, alloc)?)
+    /// can also be written more concisely using
+    /// [SvsmBox::try_pin_in]\(x, alloc).
+    /// This `into_pin` method is useful if you already have a `SvsmBox`, or
+    /// you are constructing a (pinned) `SvsmBox` in a different way than with
+    /// [`SvsmBox::try_new_in`].
+    ///
+    /// # Notes
+    ///
+    /// It's not recommended that crates add an impl like
+    /// `From<SvsmBox<T, A>> for Pin<T>`, as it'll introduce an ambiguity when
+    /// calling `Pin::from`. A demonstration of such a poor impl is shown
+    /// below.
+    ///
+    /// ```compile_fail
+    /// # use svsm::alloc::boxed::SvsmBox;
+    /// # use std::pin::Pin;
+    /// struct Foo; // A type defined in this crate.
+    /// impl From<SvsmBox<()>> for Pin<Foo> {
+    ///     fn from(_: SvsmBox<()>) -> Pin<Foo> {
+    ///         Pin::new(Foo)
+    ///     }
+    /// }
+    ///
+    /// let foo = SvsmBox::new(());
+    /// let bar = Pin::from(foo);
+    /// ```
+    pub fn into_pin(boxed: Self) -> Pin<Self>
+    where
+        A: 'static,
+    {
+        // It's not possible to move or replace the insides of a
+        // `Pin<SvsmBox<T, A>>` when `T: !Unpin`, so it's safe to pin it
+        // directly without any additional requirements.
+        unsafe { Pin::new_unchecked(boxed) }
+    }
+}
+
+impl<T, A: Allocator> SvsmBox<MaybeUninit<T>, A> {
+    /// Converts to `SvsmBox<T, A>`.
+    ///
+    /// # Safety
+    ///
+    /// As with [`MaybeUninit::assume_init`],
+    /// it is up to the caller to guarantee that the value
+    /// really is in an initialized state.
+    /// Calling this when the content is not yet fully initialized
+    /// causes immediate undefined behavior.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAllocError};
+    /// use std::alloc::System;
+    ///
+    /// let mut five = SvsmBox::<u32, _>::try_new_uninit_in(System)?;
+    ///
+    /// let five = unsafe {
+    ///     // Deferred initialization:
+    ///     five.as_mut_ptr().write(5);
+    ///
+    ///     five.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*five, 5);
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```
+    pub unsafe fn assume_init(self) -> SvsmBox<T, A> {
+        let (raw, alloc) = SvsmBox::into_raw_with_allocator(self);
+        unsafe { SvsmBox::from_raw_in(raw as *mut T, alloc) }
+    }
+
+    /// Writes the value and converts to `SvsmBox<T, A>`.
+    ///
+    /// This method converts the box similarly to [`SvsmBox::assume_init`] but
+    /// writes `value` into it before the conversion, thus guaranteeing
+    /// safety. In some scenarios use of this method may improve performance
+    /// because the compiler may be able to optimize away the copy from the
+    /// stack.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAllocError};
+    /// use std::alloc::System;
+    ///
+    /// let big_box = SvsmBox::<[usize; 1024], _>::try_new_uninit_in(System)?;
+    ///
+    /// let mut array = [0; 1024];
+    /// for (i, place) in array.iter_mut().enumerate() {
+    ///     *place = i;
+    /// }
+    ///
+    /// // The optimizer may be able to elide this copy, so that the previous
+    /// // code writes directly to the heap.
+    /// let big_box = SvsmBox::write(big_box, array);
+    ///
+    /// for (i, x) in big_box.iter().enumerate() {
+    ///     assert_eq!(*x, i);
+    /// }
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```
+    pub fn write(mut boxed: Self, value: T) -> SvsmBox<T, A> {
+        unsafe {
+            (*boxed).write(value);
+            boxed.assume_init()
+        }
+    }
+}
+
+impl<T, A: Allocator> Drop for SvsmBox<T, A> {
+    fn drop(&mut self) {
+        let ptr = self.0;
+        unsafe {
+            ptr.as_ptr().drop_in_place();
+            let layout = Layout::new::<T>();
+            if layout.size() != 0 {
+                self.1.deallocate(From::from(ptr.cast()), layout);
+            }
+        }
+    }
+}
+
+impl<T: Default, A: Allocator> SvsmBox<T, A> {
+    /// Allocates memory in the given allocator and places the default value
+    /// for `T` into it.
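+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch with the hosted `System` allocator:
+    ///
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAllocError};
+    /// use std::alloc::System;
+    ///
+    /// let zero = SvsmBox::<u64, _>::try_default_in(System)?;
+    /// assert_eq!(*zero, 0);
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```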
+    pub fn try_default_in(alloc: A) -> Result<Self, SvsmAllocError> {
+        Self::try_new_in(T::default(), alloc)
+    }
+}
+
+impl<T: Clone, A: Allocator + Clone> SvsmBox<T, A> {
+    /// Returns a new `SvsmBox` with this box's contents. The new box is
+    /// allocated with this box's allocator.
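+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch with the hosted `System` allocator:
+    ///
+    /// ```
+    /// # use svsm::alloc::{boxed::SvsmBox, SvsmAllocError};
+    /// use std::alloc::System;
+    ///
+    /// let first = SvsmBox::try_new_in(5, System)?;
+    /// let second = first.try_clone()?;
+    /// assert_eq!(*first, *second);
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```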
+    pub fn try_clone(&self) -> Result<Self, SvsmAllocError> {
+        let boxed = Self::try_new_uninit_in(self.1.clone())?;
+        Ok(SvsmBox::write(boxed, unsafe { self.0.as_ref().clone() }))
+    }
+}
+
+impl<T: Clone, A: Allocator> SvsmBox<T, A> {
+    /// Returns a new `SvsmBox` with this box's contents. The new box is
+    /// allocated with the given allocator.
+    pub fn try_clone_in(&self, alloc: A) -> Result<Self, SvsmAllocError> {
+        let boxed = Self::try_new_uninit_in(alloc)?;
+        Ok(SvsmBox::write(boxed, unsafe { self.0.as_ref().clone() }))
+    }
+}
+
+impl<T: PartialEq, A: Allocator> PartialEq for SvsmBox<T, A> {
+    #[inline]
+    fn eq(&self, other: &Self) -> bool {
+        PartialEq::eq(&**self, &**other)
+    }
+}
+
+impl<T: PartialOrd, A: Allocator> PartialOrd for SvsmBox<T, A> {
+    #[inline]
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        PartialOrd::partial_cmp(&**self, &**other)
+    }
+    #[inline]
+    fn lt(&self, other: &Self) -> bool {
+        PartialOrd::lt(&**self, &**other)
+    }
+    #[inline]
+    fn le(&self, other: &Self) -> bool {
+        PartialOrd::le(&**self, &**other)
+    }
+    #[inline]
+    fn ge(&self, other: &Self) -> bool {
+        PartialOrd::ge(&**self, &**other)
+    }
+    #[inline]
+    fn gt(&self, other: &Self) -> bool {
+        PartialOrd::gt(&**self, &**other)
+    }
+}
+
+impl<T: Ord, A: Allocator> Ord for SvsmBox<T, A> {
+    #[inline]
+    fn cmp(&self, other: &Self) -> Ordering {
+        Ord::cmp(&**self, &**other)
+    }
+}
+
+impl<T: Eq, A: Allocator> Eq for SvsmBox<T, A> {}
+
+impl<T, A: Allocator> From<SvsmBox<T, A>> for Pin<SvsmBox<T, A>>
+where
+    A: 'static,
+{
+    /// Converts a `SvsmBox<T, A>` into a `Pin<SvsmBox<T, A>>`. If `T` does
+    /// not implement [`Unpin`], then `*boxed` will be pinned in memory and
+    /// unable to be moved.
+    ///
+    /// This conversion does not allocate on the heap and happens in place.
+    ///
+    /// This is also available via [`SvsmBox::into_pin`].
+    ///
+    /// Constructing and pinning a `SvsmBox` with
+    /// <Pin<SvsmBox<T, A>>>::from([SvsmBox::try_new_in]\(x, alloc)?)
+    /// can also be written more concisely using
+    /// [SvsmBox::try_pin_in]\(x, alloc)?.
+    /// This `From` implementation is useful if you already have a `SvsmBox`,
+    /// or you are constructing a (pinned) `SvsmBox` in a different way than
+    /// with [`SvsmBox::try_new_in`].
+    fn from(boxed: SvsmBox<T, A>) -> Self {
+        SvsmBox::into_pin(boxed)
+    }
+}
+
+impl<T: fmt::Display, A: Allocator> fmt::Display for SvsmBox<T, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&**self, f)
+    }
+}
+
+impl<T: fmt::Debug, A: Allocator> fmt::Debug for SvsmBox<T, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt(&**self, f)
+    }
+}
+
+impl<T, A: Allocator> fmt::Pointer for SvsmBox<T, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // It's not possible to extract the inner Unique directly from the
+        // SvsmBox; instead we cast it to a *const which aliases the Unique.
+        let ptr: *const T = &**self;
+        fmt::Pointer::fmt(&ptr, f)
+    }
+}
+
+impl<T, A: Allocator> Deref for SvsmBox<T, A> {
+    type Target = T;
+
+    fn deref(&self) -> &T {
+        unsafe { &*self.0.as_ptr() }
+    }
+}
+
+impl<T, A: Allocator> DerefMut for SvsmBox<T, A> {
+    fn deref_mut(&mut self) -> &mut T {
+        unsafe { &mut *self.0.as_ptr() }
+    }
+}
+
+impl<T, A: Allocator> Borrow<T> for SvsmBox<T, A> {
+    fn borrow(&self) -> &T {
+        unsafe { &*self.0.as_ptr() }
+    }
+}
+
+impl<T, A: Allocator> BorrowMut<T> for SvsmBox<T, A> {
+    fn borrow_mut(&mut self) -> &mut T {
+        unsafe { &mut *self.0.as_ptr() }
+    }
+}
+
+impl<T, A: Allocator> AsRef<T> for SvsmBox<T, A> {
+    fn as_ref(&self) -> &T {
+        unsafe { &*self.0.as_ptr() }
+    }
+}
+
+impl<T, A: Allocator> AsMut<T> for SvsmBox<T, A> {
+    fn as_mut(&mut self) -> &mut T {
+        unsafe { &mut *self.0.as_ptr() }
+    }
+}
+
+/* Nota bene
+ *
+ * We could have chosen not to add this impl, and instead have written a
+ * function from Pin<Box<T>> to Pin<T>. Such a function would not be sound,
+ * because Box<T> implements Unpin even when T does not, as a result of
+ * this impl.
+ *
+ * We chose this API instead of the alternative for a few reasons:
+ * - Logically, it is helpful to understand pinning in regard to the
+ *   memory region being pointed to. For this reason none of the
+ *   standard library pointer types support projecting through a pin
+ *   (Box<T> is the only pointer type in std for which this would be
+ *   safe.)
+ * - It is in practice very useful to have Box<T> be unconditionally
+ *   Unpin because of trait objects, for which the structural auto
+ *   trait functionality does not apply (e.g., Box<dyn Foo> would
+ *   otherwise not be Unpin).
+ *
+ * Another type with the same semantics as Box<T> but only a conditional
+ * implementation of `Unpin` (where `T: Unpin`) would be valid/safe, and
+ * could have a method to project a Pin<T> from it.
+ */
+impl<T, A: Allocator> Unpin for SvsmBox<T, A> where A: 'static {}
+
+#[cfg(test)]
+mod tests {
+    extern crate std;
+    use super::*;
+    use std::alloc::System;
+
+    #[test]
+    fn box_try_new() {
+        let obj = SvsmBox::try_new_in(5, System).unwrap();
+        assert_eq!(*obj, 5);
+    }
+
+    #[test]
+    fn box_try_uninit() {
+        let mut obj = SvsmBox::<u32, _>::try_new_uninit_in(System).unwrap();
+        // SAFETY: SvsmBox owns valid memory. Memory is initialized before
+        // use.
+        let init = unsafe {
+            obj.as_mut_ptr().write(5);
+            obj.assume_init()
+        };
+        assert_eq!(*init, 5);
+    }
+
+    #[test]
+    fn box_try_uninit_write() {
+        let obj = SvsmBox::<u32, _>::try_new_uninit_in(System).unwrap();
+        let init = SvsmBox::write(obj, 7);
+        assert_eq!(*init, 7);
+    }
+
+    #[test]
+    fn box_try_zeroed() {
+        let obj = SvsmBox::<u32, _>::try_new_zeroed_in(System).unwrap();
+        // SAFETY: memory is initialized to zero, which is valid for u32
+        let init = unsafe { obj.assume_init() };
+        assert_eq!(*init, 0);
+    }
+
+    #[test]
+    fn box_nested_deref() {
+        let inner = SvsmBox::try_new_in([13; 32], System).unwrap();
+        {
+            let outer = SvsmBox::try_new_in(inner, System).unwrap();
+            assert_eq!(**outer, [13; 32]);
+        }
+    }
+
+    #[test]
+    fn box_try_clone() {
+        let first = SvsmBox::try_new_in([13; 32], System).unwrap();
+        let second = first.try_clone().unwrap();
+        drop(first);
+        assert_eq!(*second, [13; 32]);
+    }
+
+    #[test]
+    fn box_try_clone_mut() {
+        let mut first = SvsmBox::try_new_in([13; 32], System).unwrap();
+        let second = first.try_clone().unwrap();
+        first.fill(14);
+        assert_eq!(*second, [13; 32]);
+        assert_eq!(*first, [14; 32]);
+    }
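+
+    // Additional illustrative sketch: round-trip through
+    // into_raw_with_allocator() and from_raw_in(), exercising the
+    // manual-ownership path documented above.
+    #[test]
+    fn box_raw_roundtrip() {
+        let obj = SvsmBox::try_new_in(5, System).unwrap();
+        let (ptr, alloc) = SvsmBox::into_raw_with_allocator(obj);
+        // SAFETY: `ptr` was just produced by into_raw_with_allocator(), so
+        // it is valid and uniquely owned here.
+        let obj = unsafe { SvsmBox::from_raw_in(ptr, alloc) };
+        assert_eq!(*obj, 5);
+    }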
+}
diff --git a/src/alloc/mod.rs b/src/alloc/mod.rs
new file mode 100644
index 000000000..e4ec7d642
--- /dev/null
+++ b/src/alloc/mod.rs
@@ -0,0 +1,403 @@
+// SPDX-License-Identifier: MIT OR Apache-2.0
+//
+// Copyright (C) 2023 SUSE
+//
+// Authors: Carlos López
+
+//! An adapted version of the upstream Rust alloc crate with a stabilized
+//! allocator API.
+
+use core::alloc::{Layout, LayoutError};
+use core::ptr::{self, NonNull};
+
+pub mod boxed;
+mod unique;
+
+/// A stable version of [`AllocError`](core::alloc::AllocError).
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum SvsmAllocError {
+    OutOfMemory,
+    ZeroSized,
+    Layout(LayoutError),
+    CapacityOverflow,
+}
+
+impl From<LayoutError> for SvsmAllocError {
+    fn from(err: LayoutError) -> Self {
+        Self::Layout(err)
+    }
+}
+
+/// A stable version of the [`Allocator`](core::alloc::Allocator) trait.
+///
+/// An implementation of `SvsmAlloc` can allocate, grow, shrink, and
+/// deallocate arbitrary blocks of data described via [`Layout`][].
+///
+/// `SvsmAlloc` is designed to be implemented on ZSTs, references, or smart
+/// pointers because an allocator like `MyAlloc([u8; N])` cannot be moved
+/// without updating the pointers to the allocated memory.
+///
+/// Unlike [`GlobalAlloc`], zero-sized allocations are allowed in `SvsmAlloc`.
+/// If an underlying allocator does not support them (like jemalloc) or
+/// returns a null pointer for them (as `libc::malloc` does), this must be
+/// caught by the implementation.
+///
+/// ### Currently allocated memory
+///
+/// Some of the methods require that a memory block be *currently allocated*
+/// via an allocator. This means that:
+///
+/// * the starting address for that memory block was previously returned by
+///   [`allocate`], [`grow`], or [`shrink`], and
+///
+/// * the memory block has not been subsequently deallocated, where blocks
+///   are either deallocated directly by being passed to [`deallocate`] or
+///   were changed by being passed to [`grow`] or [`shrink`] that returns
+///   `Ok`. If `grow` or `shrink` have returned `Err`, the passed pointer
+///   remains valid.
+///
+/// [`allocate`]: SvsmAlloc::allocate
+/// [`grow`]: SvsmAlloc::grow
+/// [`shrink`]: SvsmAlloc::shrink
+/// [`deallocate`]: SvsmAlloc::deallocate
+/// [`GlobalAlloc`]: core::alloc::GlobalAlloc
+///
+/// ### Memory fitting
+///
+/// Some of the methods require that a layout *fit* a memory block. For a
+/// layout to "fit" a memory block (or equivalently, for a memory block to
+/// "fit" a layout), the following conditions must hold:
+///
+/// * The block must be allocated with the same alignment as
+///   [`layout.align()`], and
+///
+/// * The provided [`layout.size()`] must fall in the range `min ..= max`,
+///   where:
+///   - `min` is the size of the layout most recently used to allocate the
+///     block, and
+///   - `max` is the latest actual size returned from [`allocate`], [`grow`],
+///     or [`shrink`].
+///
+/// [`layout.align()`]: Layout::align
+/// [`layout.size()`]: Layout::size
+///
+/// # Safety
+///
+/// * Memory blocks returned from an allocator that are [*currently
+///   allocated*] must point to valid memory and retain their validity while
+///   they are [*currently allocated*] and at least one of the instance or
+///   its clones has not been dropped,
+///
+/// * copying, cloning, or moving the allocator must not invalidate memory
+///   blocks returned from this allocator. A copied or cloned allocator must
+///   behave like the same allocator, and
+///
+/// * any pointer to a memory block which is [*currently allocated*] may be
+///   passed to any other method of the allocator.
+///
+/// [*currently allocated*]: #currently-allocated-memory
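+///
+/// # Examples
+///
+/// A minimal sketch of an implementation that delegates to another allocator
+/// while tracking the number of live allocations; the `CountingAlloc` type
+/// and its field names are illustrative, not part of this crate:
+///
+/// ```
+/// # use svsm::alloc::{SvsmAlloc, SvsmAllocError};
+/// use core::alloc::Layout;
+/// use core::ptr::NonNull;
+/// use core::sync::atomic::{AtomicUsize, Ordering};
+/// use std::alloc::System;
+///
+/// struct CountingAlloc {
+///     live: AtomicUsize,
+/// }
+///
+/// unsafe impl SvsmAlloc for CountingAlloc {
+///     fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, SvsmAllocError> {
+///         let ptr = System.allocate(layout)?;
+///         self.live.fetch_add(1, Ordering::Relaxed);
+///         Ok(ptr)
+///     }
+///
+///     unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+///         // SAFETY: the caller upholds the `deallocate` contract, and the
+///         // block was handed out by `System` in `allocate` above.
+///         unsafe { System.deallocate(ptr, layout) };
+///         self.live.fetch_sub(1, Ordering::Relaxed);
+///     }
+/// }
+///
+/// let alloc = CountingAlloc { live: AtomicUsize::new(0) };
+/// let ptr = alloc.allocate(Layout::new::<u64>())?;
+/// // SAFETY: `ptr` is currently allocated in `alloc` with this layout.
+/// unsafe { alloc.deallocate(ptr.cast(), Layout::new::<u64>()) };
+/// assert_eq!(alloc.live.load(Ordering::Relaxed), 0);
+/// # Ok::<(), SvsmAllocError>(())
+/// ```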
+pub unsafe trait SvsmAlloc {
+    /// Attempts to allocate a block of memory.
+    ///
+    /// On success, returns a [`NonNull<[u8]>`][NonNull] meeting the size and
+    /// alignment guarantees of `layout`.
+    ///
+    /// The returned block may have a larger size than specified by
+    /// `layout.size()`, and may or may not have its contents initialized.
+    ///
+    /// # Errors
+    ///
+    /// Returning `Err` indicates that either memory is exhausted or `layout`
+    /// does not meet the allocator's size or alignment constraints.
+    ///
+    /// Implementations are encouraged to return `Err` on memory exhaustion
+    /// rather than panicking or aborting, but this is not a strict
+    /// requirement. (Specifically: it is *legal* to implement this trait
+    /// atop an underlying native allocation library that aborts on memory
+    /// exhaustion.)
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, SvsmAllocError>;
+
+    /// Behaves like `allocate`, but also ensures that the returned memory is
+    /// zero-initialized.
+    ///
+    /// # Errors
+    ///
+    /// Returning `Err` indicates that either memory is exhausted or `layout`
+    /// does not meet the allocator's size or alignment constraints.
+    ///
+    /// Implementations are encouraged to return `Err` on memory exhaustion
+    /// rather than panicking or aborting, but this is not a strict
+    /// requirement. (Specifically: it is *legal* to implement this trait
+    /// atop an underlying native allocation library that aborts on memory
+    /// exhaustion.)
+    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, SvsmAllocError> {
+        let ptr = self.allocate(layout)?;
+        // SAFETY: `allocate` returns a valid memory block
+        unsafe { ptr.as_ptr().cast::<u8>().write_bytes(0, ptr.len()) };
+        Ok(ptr)
+    }
+
+    /// Deallocates the memory referenced by `ptr`.
+    ///
+    /// # Safety
+    ///
+    /// * `ptr` must denote a block of memory [*currently allocated*] via
+    ///   this allocator, and
+    /// * `layout` must [*fit*] that block of memory.
+    ///
+    /// [*currently allocated*]: #currently-allocated-memory
+    /// [*fit*]: #memory-fitting
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout);
+
+    /// Attempts to extend the memory block.
+    ///
+    /// Returns a new [`NonNull<[u8]>`][NonNull] containing a pointer and the
+    /// actual size of the allocated memory. The pointer is suitable for
+    /// holding data described by `new_layout`. To accomplish this, the
+    /// allocator may extend the allocation referenced by `ptr` to fit the
+    /// new layout.
+    ///
+    /// If this returns `Ok`, then ownership of the memory block referenced
+    /// by `ptr` has been transferred to this allocator. Any access to the
+    /// old `ptr` is Undefined Behavior, even if the allocation was grown
+    /// in-place. The newly returned pointer is the only valid pointer for
+    /// accessing this memory now.
+    ///
+    /// If this method returns `Err`, then ownership of the memory block has
+    /// not been transferred to this allocator, and the contents of the
+    /// memory block are unaltered.
+    ///
+    /// # Safety
+    ///
+    /// * `ptr` must denote a block of memory [*currently allocated*] via
+    ///   this allocator.
+    /// * `old_layout` must [*fit*] that block of memory (the `new_layout`
+    ///   argument need not fit it).
+    /// * `new_layout.size()` must be greater than or equal to
+    ///   `old_layout.size()`.
+    ///
+    /// Note that `new_layout.align()` need not be the same as
+    /// `old_layout.align()`.
+    ///
+    /// [*currently allocated*]: #currently-allocated-memory
+    /// [*fit*]: #memory-fitting
+    ///
+    /// # Errors
+    ///
+    /// Returns `Err` if the new layout does not meet the allocator's size
+    /// and alignment constraints, or if growing otherwise fails.
+    ///
+    /// Implementations are encouraged to return `Err` on memory exhaustion
+    /// rather than panicking or aborting, but this is not a strict
+    /// requirement. (Specifically: it is *legal* to implement this trait
+    /// atop an underlying native allocation library that aborts on memory
+    /// exhaustion.)
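+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch of growing a block with the hosted `System`
+    /// allocator, which uses this default (allocate, copy, free)
+    /// implementation:
+    ///
+    /// ```
+    /// # use svsm::alloc::{SvsmAlloc, SvsmAllocError};
+    /// use core::alloc::Layout;
+    /// use std::alloc::System;
+    ///
+    /// let old_layout = Layout::array::<u8>(16)?;
+    /// let new_layout = Layout::array::<u8>(32)?;
+    ///
+    /// let block = System.allocate(old_layout)?;
+    /// // SAFETY: `block` is currently allocated with `old_layout`, and the
+    /// // new size is not smaller than the old one.
+    /// let grown = unsafe { System.grow(block.cast(), old_layout, new_layout)? };
+    /// // SAFETY: `grown` is now the only valid pointer to the block.
+    /// unsafe { System.deallocate(grown.cast(), new_layout) };
+    /// # Ok::<(), SvsmAllocError>(())
+    /// ```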
+    unsafe fn grow(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, SvsmAllocError> {
+        debug_assert!(
+            new_layout.size() >= old_layout.size(),
+            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
+        );
+
+        let new_ptr = self.allocate(new_layout)?;
+
+        // SAFETY: because `new_layout.size()` must be greater than or equal to
+        // `old_layout.size()`, both the old and new memory allocation are valid for reads and
+        // writes for `old_layout.size()` bytes. Also, because the old allocation wasn't yet
+        // deallocated, it cannot overlap `new_ptr`. Thus, the call to `copy_nonoverlapping` is
+        // safe. The safety contract for `dealloc` must be upheld by the caller.
+        unsafe {
+            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr().cast(), old_layout.size());
+            self.deallocate(ptr, old_layout);
+        }
+
+        Ok(new_ptr)
+    }
+
+    /// Behaves like `grow`, but also ensures that the new contents are set
+    /// to zero before being returned.
+    ///
+    /// The memory block will contain the following contents after a
+    /// successful call to `grow_zeroed`:
+    /// * Bytes `0..old_layout.size()` are preserved from the original
+    ///   allocation.
+    /// * Bytes `old_layout.size()..old_size` will either be preserved or
+    ///   zeroed, depending on the allocator implementation. `old_size`
+    ///   refers to the size of the memory block prior to the `grow_zeroed`
+    ///   call, which may be larger than the size that was originally
+    ///   requested when it was allocated.
+    /// * Bytes `old_size..new_size` are zeroed. `new_size` refers to the
+    ///   size of the memory block returned by the `grow_zeroed` call.
+    ///
+    /// # Safety
+    ///
+    /// * `ptr` must denote a block of memory [*currently allocated*] via
+    ///   this allocator.
+    /// * `old_layout` must [*fit*] that block of memory (the `new_layout`
+    ///   argument need not fit it).
+    /// * `new_layout.size()` must be greater than or equal to
+    ///   `old_layout.size()`.
+    ///
+    /// Note that `new_layout.align()` need not be the same as
+    /// `old_layout.align()`.
+    ///
+    /// [*currently allocated*]: #currently-allocated-memory
+    /// [*fit*]: #memory-fitting
+    ///
+    /// # Errors
+    ///
+    /// Returns `Err` if the new layout does not meet the allocator's size
+    /// and alignment constraints, or if growing otherwise fails.
+    ///
+    /// Implementations are encouraged to return `Err` on memory exhaustion
+    /// rather than panicking or aborting, but this is not a strict
+    /// requirement. (Specifically: it is *legal* to implement this trait
+    /// atop an underlying native allocation library that aborts on memory
+    /// exhaustion.)
+    unsafe fn grow_zeroed(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, SvsmAllocError> {
+        debug_assert!(
+            new_layout.size() >= old_layout.size(),
+            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
+        );
+
+        let new_ptr = self.allocate_zeroed(new_layout)?;
+
+        // SAFETY: because `new_layout.size()` must be greater than or equal to
+        // `old_layout.size()`, both the old and new memory allocation are valid for reads and
+        // writes for `old_layout.size()` bytes. Also, because the old allocation wasn't yet
+        // deallocated, it cannot overlap `new_ptr`. Thus, the call to `copy_nonoverlapping` is
+        // safe. The safety contract for `dealloc` must be upheld by the caller.
+        unsafe {
+            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr().cast(), old_layout.size());
+            self.deallocate(ptr, old_layout);
+        }
+
+        Ok(new_ptr)
+    }
+
+    /// Attempts to shrink the memory block.
+    ///
+    /// Returns a new [`NonNull<[u8]>`][NonNull] containing a pointer and the
+    /// actual size of the allocated memory. The pointer is suitable for
+    /// holding data described by `new_layout`. To accomplish this, the
+    /// allocator may shrink the allocation referenced by `ptr` to fit the
+    /// new layout.
+    ///
+    /// If this returns `Ok`, then ownership of the memory block referenced
+    /// by `ptr` has been transferred to this allocator. Any access to the
+    /// old `ptr` is Undefined Behavior, even if the allocation was shrunk
+    /// in-place. The newly returned pointer is the only valid pointer for
+    /// accessing this memory now.
+    ///
+    /// If this method returns `Err`, then ownership of the memory block has
+    /// not been transferred to this allocator, and the contents of the
+    /// memory block are unaltered.
+    ///
+    /// # Safety
+    ///
+    /// * `ptr` must denote a block of memory [*currently allocated*] via
+    ///   this allocator.
+    /// * `old_layout` must [*fit*] that block of memory (the `new_layout`
+    ///   argument need not fit it).
+    /// * `new_layout.size()` must be smaller than or equal to
+    ///   `old_layout.size()`.
+    ///
+    /// Note that `new_layout.align()` need not be the same as
+    /// `old_layout.align()`.
+    ///
+    /// [*currently allocated*]: #currently-allocated-memory
+    /// [*fit*]: #memory-fitting
+    ///
+    /// # Errors
+    ///
+    /// Returns `Err` if the new layout does not meet the allocator's size
+    /// and alignment constraints, or if shrinking otherwise fails.
+    ///
+    /// Implementations are encouraged to return `Err` on memory exhaustion
+    /// rather than panicking or aborting, but this is not a strict
+    /// requirement. (Specifically: it is *legal* to implement this trait
+    /// atop an underlying native allocation library that aborts on memory
+    /// exhaustion.)
+    ///
+    /// Clients wishing to abort computation in response to an allocation
+    /// error are encouraged to call the [`handle_alloc_error`] function,
+    /// rather than directly invoking `panic!` or similar.
+    ///
+    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+    unsafe fn shrink(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, SvsmAllocError> {
+        debug_assert!(
+            new_layout.size() <= old_layout.size(),
+            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
+        );
+
+        let new_ptr = self.allocate(new_layout)?;
+
+        // SAFETY: because `new_layout.size()` must be lower than or equal to
+        // `old_layout.size()`, both the old and new memory allocation are valid for reads and
+        // writes for `new_layout.size()` bytes. Also, because the old allocation wasn't yet
+        // deallocated, it cannot overlap `new_ptr`. Thus, the call to `copy_nonoverlapping` is
+        // safe. The safety contract for `dealloc` must be upheld by the caller.
+        unsafe {
+            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr().cast(), new_layout.size());
+            self.deallocate(ptr, old_layout);
+        }
+
+        Ok(new_ptr)
+    }
+
+    /// Creates a "by reference" adapter for this instance of `SvsmAlloc`.
+    ///
+    /// The returned adapter also implements `SvsmAlloc` and will simply
+    /// borrow this.
+    #[inline(always)]
+    fn by_ref(&self) -> &Self
+    where
+        Self: Sized,
+    {
+        self
+    }
+}
+
+unsafe impl<A> SvsmAlloc for &A
+where
+    A: SvsmAlloc + ?Sized,
+{
+    #[inline]
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, SvsmAllocError> {
+        (**self).allocate(layout)
+    }
+
+    #[inline]
+    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, SvsmAllocError> {
+        (**self).allocate_zeroed(layout)
+    }
+
+    #[inline]
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        // SAFETY: the safety contract must be upheld by the caller
+        unsafe { (**self).deallocate(ptr, layout) }
+    }
+
+    #[inline]
+    unsafe fn grow(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, SvsmAllocError> {
+        // SAFETY: the safety contract must be upheld by the caller
+        unsafe { (**self).grow(ptr, old_layout, new_layout) }
+    }
+
+    #[inline]
+    unsafe fn grow_zeroed(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, SvsmAllocError> {
+        // SAFETY: the safety contract must be upheld by the caller
+        unsafe { (**self).grow_zeroed(ptr, old_layout, new_layout) }
+    }
+
+    #[inline]
+    unsafe fn shrink(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, SvsmAllocError> {
+        // SAFETY: the safety contract must be upheld by the caller
+        unsafe { (**self).shrink(ptr, old_layout, new_layout) }
+    }
+}
+
+#[cfg(not(target_os = "none"))]
+extern crate std;
+#[cfg(not(target_os = "none"))]
+use core::alloc::GlobalAlloc;
+#[cfg(not(target_os = "none"))]
+unsafe impl SvsmAlloc for std::alloc::System {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, SvsmAllocError> {
+        let size = layout.size();
+        if size == 0 {
+            return Err(SvsmAllocError::ZeroSized);
+        }
+        // SAFETY: size is nonzero
+        let raw_ptr = unsafe { self.alloc(layout) };
+        let ptr = NonNull::new(raw_ptr).ok_or(SvsmAllocError::OutOfMemory)?;
+        Ok(NonNull::slice_from_raw_parts(ptr, size))
+    }
+
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        // SAFETY: the safety contract must be upheld by the caller
+        unsafe { self.dealloc(ptr.as_ptr(), layout) }
+    }
+}
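+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::alloc::System;
+
+    // Illustrative sketch: exercise the default grow()/shrink()
+    // implementations through the hosted System allocator.
+    #[test]
+    fn system_grow_shrink() {
+        let small = Layout::array::<u8>(8).unwrap();
+        let large = Layout::array::<u8>(64).unwrap();
+
+        let ptr = System.allocate(small).unwrap();
+        // SAFETY: `ptr` is currently allocated with layout `small`, and
+        // `large` is not smaller than `small`.
+        let grown = unsafe { System.grow(ptr.cast(), small, large).unwrap() };
+        assert!(grown.len() >= large.size());
+        // SAFETY: `grown` is currently allocated with layout `large`, and
+        // `small` is not larger than `large`.
+        let shrunk = unsafe { System.shrink(grown.cast(), large, small).unwrap() };
+        // SAFETY: `shrunk` is the only live pointer to the block.
+        unsafe { System.deallocate(shrunk.cast(), small) };
+    }
+
+    // Illustrative sketch: the System impl rejects zero-sized allocations.
+    #[test]
+    fn system_zero_sized() {
+        let zst = Layout::new::<()>();
+        assert_eq!(System.allocate(zst), Err(SvsmAllocError::ZeroSized));
+    }
+}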
diff --git a/src/alloc/unique.rs b/src/alloc/unique.rs
new file mode 100644
index 000000000..970e0fb20
--- /dev/null
+++ b/src/alloc/unique.rs
@@ -0,0 +1,195 @@
+// SPDX-License-Identifier: MIT OR Apache-2.0
+//
+// Copyright (C) 2023 SUSE
+//
+// Authors: Carlos López
+
+use core::convert::From;
+use core::fmt;
+use core::marker::PhantomData;
+use core::ptr::NonNull;
+
+/// A wrapper around a raw non-null `*mut T` that indicates that the possessor
+/// of this wrapper owns the referent. Useful for building abstractions like
+/// `Box<T>`, `Vec<T>`, `String`, and `HashMap<K, V>`.
+///
+/// Unlike `*mut T`, `SvsmUnique<T>` behaves "as if" it were an instance of
+/// `T`. It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies
+/// the kind of strong aliasing guarantees an instance of `T` can expect:
+/// the referent of the pointer should not be modified without a unique path
+/// to its owning SvsmUnique.
+///
+/// If you're uncertain of whether it's correct to use `SvsmUnique` for your
+/// purposes, consider using `NonNull`, which has weaker semantics.
+///
+/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
+/// is never dereferenced. This is so that enums may use this forbidden value
+/// as a discriminant -- `Option<SvsmUnique<T>>` has the same size as
+/// `SvsmUnique<T>`. However the pointer may still dangle if it isn't
+/// dereferenced.
+///
+/// Unlike `*mut T`, `SvsmUnique<T>` is covariant over `T`. This should
+/// always be correct for any type which upholds SvsmUnique's aliasing
+/// requirements.
+#[repr(transparent)]
+// Lang item used experimentally by Miri to define the semantics of `SvsmUnique`.
+pub struct SvsmUnique<T: ?Sized> {
+    pointer: NonNull<T>,
+    // NOTE: this marker has no consequences for variance, but is necessary
+    // for dropck to understand that we logically own a `T`.
+    //
+    // For details, see:
+    // https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data
+    _marker: PhantomData<T>,
+}
+
+/// `SvsmUnique` pointers are `Send` if `T` is `Send` because the data they
+/// reference is unaliased. Note that this aliasing invariant is
+/// unenforced by the type system; the abstraction using the
+/// `SvsmUnique` must enforce it.
+unsafe impl<T: Send + ?Sized> Send for SvsmUnique<T> {}
+
+/// `SvsmUnique` pointers are `Sync` if `T` is `Sync` because the data they
+/// reference is unaliased. Note that this aliasing invariant is
+/// unenforced by the type system; the abstraction using the
+/// `SvsmUnique` must enforce it.
+unsafe impl<T: Sync + ?Sized> Sync for SvsmUnique<T> {}
+
+impl<T: Sized> SvsmUnique<T> {
+    /// Creates a new `SvsmUnique` that is dangling, but well-aligned.
+    ///
+    /// This is useful for initializing types which lazily allocate, like
+    /// `Vec::new` does.
+    ///
+    /// Note that the pointer value may potentially represent a valid pointer
+    /// to a `T`, which means this must not be used as a "not yet
+    /// initialized" sentinel value. Types that lazily allocate must track
+    /// initialization by some other means.
+    #[must_use]
+    #[inline]
+    pub const fn dangling() -> Self {
+        // FIXME(const-hack) replace with `From`
+        SvsmUnique {
+            pointer: NonNull::dangling(),
+            _marker: PhantomData,
+        }
+    }
+}
+
+impl<T: ?Sized> SvsmUnique<T> {
+    /// Creates a new `SvsmUnique`.
+    ///
+    /// # Safety
+    ///
+    /// `ptr` must be non-null.
+    #[inline]
+    pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
+        // SAFETY: the caller must guarantee that `ptr` is non-null.
+        unsafe {
+            SvsmUnique {
+                pointer: NonNull::new_unchecked(ptr),
+                _marker: PhantomData,
+            }
+        }
+    }
+
+    /// Creates a new `SvsmUnique` if `ptr` is non-null.
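+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch (marked `ignore` since this type is crate-internal
+    /// and not reachable from doc tests):
+    ///
+    /// ```ignore
+    /// let mut value = 5i32;
+    /// let ptr = SvsmUnique::new(&mut value as *mut i32).unwrap();
+    /// // SAFETY: `ptr` points to `value`, which is live and unaliased here.
+    /// assert_eq!(unsafe { *ptr.as_ref() }, 5);
+    /// ```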
+    #[inline]
+    pub fn new(ptr: *mut T) -> Option<Self> {
+        NonNull::new(ptr).map(|pointer| SvsmUnique {
+            pointer,
+            _marker: PhantomData,
+        })
+    }
+
+    /// Acquires the underlying `*mut` pointer.
+    #[must_use = "`self` will be dropped if the result is not used"]
+    #[inline]
+    pub const fn as_ptr(self) -> *mut T {
+        self.pointer.as_ptr()
+    }
+
+    /// Dereferences the content.
+    ///
+    /// The resulting lifetime is bound to self so this behaves "as if"
+    /// it were actually an instance of T that is getting borrowed. If a
+    /// longer (unbound) lifetime is needed, use `&*my_ptr.as_ptr()`.
+    #[must_use]
+    #[inline]
+    pub const unsafe fn as_ref(&self) -> &T {
+        // SAFETY: the caller must guarantee that `self` meets all the
+        // requirements for a reference.
+        unsafe { self.pointer.as_ref() }
+    }
+
+    /// Mutably dereferences the content.
+    ///
+    /// The resulting lifetime is bound to self so this behaves "as if"
+    /// it were actually an instance of T that is getting borrowed. If a
+    /// longer (unbound) lifetime is needed, use `&mut *my_ptr.as_ptr()`.
+    #[must_use]
+    #[inline]
+    pub unsafe fn as_mut(&mut self) -> &mut T {
+        // SAFETY: the caller must guarantee that `self` meets all the
+        // requirements for a mutable reference.
+        unsafe { self.pointer.as_mut() }
+    }
+
+    /// Casts to a pointer of another type.
+    #[must_use = "`self` will be dropped if the result is not used"]
+    #[inline]
+    pub const fn cast<U>(self) -> SvsmUnique<U> {
+        // FIXME(const-hack): replace with `From`
+        // SAFETY: is `NonNull`
+        unsafe { SvsmUnique::new_unchecked(self.pointer.cast().as_ptr()) }
+    }
+}
+
+impl<T: ?Sized> Clone for SvsmUnique<T> {
+    #[inline]
+    fn clone(&self) -> Self {
+        *self
+    }
+}
+
+impl<T: ?Sized> Copy for SvsmUnique<T> {}
+
+impl<T: ?Sized> fmt::Debug for SvsmUnique<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Pointer::fmt(&self.as_ptr(), f)
+    }
+}
+
+impl<T: ?Sized> fmt::Pointer for SvsmUnique<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Pointer::fmt(&self.as_ptr(), f)
+    }
+}
+
+impl<T: ?Sized> From<&mut T> for SvsmUnique<T> {
+    /// Converts a `&mut T` to a `SvsmUnique<T>`.
+    ///
+    /// This conversion is infallible since references cannot be null.
+    #[inline]
+    fn from(reference: &mut T) -> Self {
+        Self::from(NonNull::from(reference))
+    }
+}
+
+impl<T: ?Sized> From<NonNull<T>> for SvsmUnique<T> {
+    /// Converts a `NonNull<T>` to a `SvsmUnique<T>`.
+    ///
+    /// This conversion is infallible since `NonNull` cannot be null.
+    #[inline]
+    fn from(pointer: NonNull<T>) -> Self {
+        SvsmUnique {
+            pointer,
+            _marker: PhantomData,
+        }
+    }
+}
+
+impl<T: ?Sized> From<SvsmUnique<T>> for NonNull<T> {
+    #[inline]
+    fn from(unique: SvsmUnique<T>) -> Self {
+        unique.pointer
+    }
+}
diff --git a/src/lib.rs b/src/lib.rs
index fd0f3adf7..b1fbc4240 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -14,6 +14,7 @@
 pub mod acpi;
 pub mod address;
+pub mod alloc;
 pub mod config;
 pub mod console;
 pub mod cpu;