Skip to content

Commit

Permalink
refine code
Browse files Browse the repository at this point in the history
  • Loading branch information
ClawSeven committed Nov 30, 2023
1 parent 4ba6592 commit 47b3996
Show file tree
Hide file tree
Showing 10 changed files with 138 additions and 214 deletions.
43 changes: 30 additions & 13 deletions sgx_trts/src/emm/alloc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,13 @@ use intrusive_collections::singly_linked_list::{Link, SinglyLinkedList};
use intrusive_collections::UnsafeRef;
use sgx_tlibc_sys::ENOMEM;

use crate::sync::SpinMutex as Mutex;
use core::alloc::{AllocError, Allocator, Layout};
use core::any::Any;
use core::mem::size_of;
use core::mem::transmute;
use core::mem::MaybeUninit;
use core::ptr::NonNull;
use spin::{Mutex, Once};
use spin::Once;

use super::ema::EmaOptions;
use super::page::AllocFlags;
Expand Down Expand Up @@ -80,6 +81,10 @@ pub fn init_reserve_alloc() {
RSRV_ALLOCATOR.call_once(|| Mutex::new(Reserve::new(INIT_MEM_SIZE)));
}

/// Allocator abstraction for the EMM subsystem.
///
/// Combines the (unstable) `core::alloc::Allocator` API with `Any` so that a
/// type-erased `&dyn EmmAllocator` can later be downcast back to its concrete
/// implementor (e.g. `RsrvAlloc` or `StaticAlloc`) via [`as_any`].
pub trait EmmAllocator: Allocator + Any {
    /// Upcast to `&dyn Any`, enabling `downcast_ref` on the concrete allocator.
    fn as_any(&self) -> &dyn Any;
}

/// AllocType layout memory from reserve memory region
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct RsrvAlloc;
Expand All @@ -102,6 +107,12 @@ unsafe impl Allocator for RsrvAlloc {
}
}

// RsrvAlloc participates in allocator type-erasure: expose itself as
// `&dyn Any` so callers can downcast a `&dyn EmmAllocator` back to it.
impl EmmAllocator for RsrvAlloc {
    fn as_any(&self) -> &dyn Any {
        self as &dyn Any
    }
}

/// AllocType layout memory from static memory region
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct StaticAlloc;
Expand All @@ -123,23 +134,29 @@ unsafe impl Allocator for StaticAlloc {
}
}

// StaticAlloc participates in allocator type-erasure: expose itself as
// `&dyn Any` so callers can downcast a `&dyn EmmAllocator` back to it.
impl EmmAllocator for StaticAlloc {
    fn as_any(&self) -> &dyn Any {
        let erased: &dyn Any = self;
        erased
    }
}

// Enum for allocator types
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum AllocType {
Static(StaticAlloc),
Reserve(RsrvAlloc),
Static,
Reserve,
}

impl AllocType {
pub fn new_static() -> Self {
Self::Static(StaticAlloc)
}

pub fn new_rsrv() -> Self {
Self::Reserve(RsrvAlloc)
pub fn alloctor(&self) -> &'static dyn EmmAllocator {
match self {
AllocType::Static => &StaticAlloc,
AllocType::Reserve => &RsrvAlloc,
}
}
}

// Chunk manages memory range.
// The Chunk structure is filled into the layout before the base pointer.
#[derive(Debug)]
Expand Down Expand Up @@ -252,7 +269,7 @@ impl Reserve {
for block in &mut exact_blocks {
block.write(SinglyLinkedList::new(BlockFreeAda::new()));
}
unsafe { transmute(exact_blocks) }
unsafe { MaybeUninit::array_assume_init(exact_blocks) }
};

let mut reserve = Self {
Expand Down Expand Up @@ -478,7 +495,7 @@ impl Reserve {
typ: PageType::None,
prot: ProtFlags::NONE,
})
.alloc(AllocType::new_static());
.alloc(AllocType::Static);
let base = vmmgr.alloc(&options, RangeType::User)?;

let mut options = EmaOptions::new(
Expand All @@ -487,7 +504,7 @@ impl Reserve {
AllocFlags::COMMIT_ON_DEMAND | AllocFlags::FIXED,
);

options.alloc(AllocType::new_static());
options.alloc(AllocType::Static);
let base = vmmgr.alloc(&options, RangeType::User)?;

vmmgr.commit(base, rsize)?;
Expand Down
89 changes: 27 additions & 62 deletions sgx_trts/src/emm/bitmap.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,13 @@

use alloc::boxed::Box;
use alloc::vec;
use core::alloc::Allocator;
use core::alloc::Layout;
use core::ptr::NonNull;
use sgx_tlibc_sys::EACCES;
use sgx_types::error::OsResult;

use crate::emm::alloc::EmmAllocator;
use crate::emm::alloc::RsrvAlloc;
use crate::emm::alloc::StaticAlloc;

use super::alloc::AllocType;

const BYTE_SIZE: usize = 8;
Expand All @@ -32,13 +33,11 @@ macro_rules! bytes_num {
};
}

#[repr(C)]
#[derive(Debug)]
pub struct BitArray {
bits: usize,
bytes: usize,
data: *mut u8,
alloc: AllocType,
data: Box<[u8], &'static dyn EmmAllocator>,
}

impl BitArray {
Expand All @@ -47,24 +46,9 @@ impl BitArray {
let bytes = bytes_num!(bits);

// FIXME: return error if OOM
let data = match alloc {
AllocType::Reserve(allocator) => {
// Set bits to all zeros
let data = vec::from_elem_in(0_u8, bytes, allocator).into_boxed_slice();
Box::into_raw(data) as *mut u8
}
AllocType::Static(allocator) => {
let data = vec::from_elem_in(0_u8, bytes, allocator).into_boxed_slice();
Box::into_raw(data) as *mut u8
}
};

Ok(Self {
bits,
bytes,
data,
alloc,
})
let data = vec::from_elem_in(0_u8, bytes, alloc.alloctor()).into_boxed_slice();

Ok(Self { bits, bytes, data })
}

/// Get the value of the bit at a given index
Expand All @@ -76,8 +60,7 @@ impl BitArray {
let byte_index = index / BYTE_SIZE;
let bit_index = index % BYTE_SIZE;
let bit_mask = 1 << bit_index;
let data = unsafe { core::slice::from_raw_parts_mut(self.data, self.bytes) };
Ok((data.get(byte_index).unwrap() & bit_mask) != 0)
Ok((self.data.get(byte_index).unwrap() & bit_mask) != 0)
}

/// Check whether all bits are set true
Expand All @@ -99,26 +82,33 @@ impl BitArray {
let bit_index = index % BYTE_SIZE;
let bit_mask = 1 << bit_index;

let data = unsafe { core::slice::from_raw_parts_mut(self.data, self.bytes) };

if value {
data[byte_index] |= bit_mask;
self.data[byte_index] |= bit_mask;
} else {
data[byte_index] &= !bit_mask;
self.data[byte_index] &= !bit_mask;
}
Ok(())
}

/// Set all the bits to true
pub fn set_full(&mut self) {
let data = unsafe { core::slice::from_raw_parts_mut(self.data, self.bytes) };
data.fill(0xFF);
self.data.fill(0xFF);
}

/// Clear all the bits
pub fn clear(&mut self) {
let data = unsafe { core::slice::from_raw_parts_mut(self.data, self.bytes) };
data.fill(0);
self.data.fill(0);
}

/// Recover the `AllocType` tag of this array's backing buffer by
/// downcasting the boxed slice's allocator to its concrete type.
///
/// # Panics
///
/// Panics if the allocator is neither `RsrvAlloc` nor `StaticAlloc` —
/// presumably the only two `EmmAllocator` implementations in use
/// (both are handed out by `AllocType`).
fn alloc_type(&self) -> AllocType {
    let allocator = *Box::allocator(&self.data);
    if allocator.as_any().downcast_ref::<RsrvAlloc>().is_some() {
        AllocType::Reserve
    } else if allocator.as_any().downcast_ref::<StaticAlloc>().is_some() {
        AllocType::Static
    } else {
        // A bare `panic!()` yields no diagnostic; name the broken invariant.
        panic!("BitArray is backed by an unknown EmmAllocator implementation")
    }
}

/// Split current bit array at specified position, return a new allocated bit array
Expand All @@ -136,11 +126,10 @@ impl BitArray {
let rbits = self.bits - lbits;
let rbytes = bytes_num!(rbits);

let rarray = Self::new(rbits, self.alloc)?;

let rdata = unsafe { core::slice::from_raw_parts_mut(rarray.data, rarray.bytes) };
let ldata = unsafe { core::slice::from_raw_parts_mut(self.data, self.bytes) };
let mut rarray = Self::new(rbits, self.alloc_type())?;

let rdata = &mut rarray.data;
let ldata = &mut self.data;
for (idx, item) in rdata[..(rbytes - 1)].iter_mut().enumerate() {
// current byte index in previous bit_array
let curr_idx = idx + byte_index;
Expand All @@ -156,27 +145,3 @@ impl BitArray {
Ok(rarray)
}
}

// Frees the raw bitmap buffer through the allocator family recorded in
// `self.alloc` — the same one that produced it in `BitArray::new`.
impl Drop for BitArray {
    fn drop(&mut self) {
        match self.alloc {
            AllocType::Reserve(allocator) => {
                // The layout argument is redundant here: the interior allocator
                // tracks the allocated size itself. Moreover, if the bitmap has
                // been split, the size recorded in the bitmap no longer
                // corresponds to the originally allocated layout, so a
                // placeholder layout is passed instead.
                let fake_layout: Layout = Layout::new::<u8>();
                // SAFETY: `self.data` was produced by `Box::into_raw` in
                // `BitArray::new`, so it is non-null and owned by `allocator`.
                unsafe {
                    let data_ptr = NonNull::new_unchecked(self.data);
                    allocator.deallocate(data_ptr, fake_layout);
                }
            }
            AllocType::Static(allocator) => {
                // Same reasoning as above: the allocator tracks the real size,
                // so a placeholder layout suffices.
                let fake_layout: Layout = Layout::new::<u8>();
                // SAFETY: `self.data` was produced by `Box::into_raw` in
                // `BitArray::new`, so it is non-null and owned by `allocator`.
                unsafe {
                    let data_ptr = NonNull::new_unchecked(self.data);
                    allocator.deallocate(data_ptr, fake_layout);
                }
            }
        }
    }
}
Loading

0 comments on commit 47b3996

Please sign in to comment.