morpheus_network/alloc_heap.rs
//! Post-EBS Global Allocator
//!
//! Uses linked_list_allocator for a battle-tested, no_std heap.
//! Initialized from a static buffer after ExitBootServices.
//!
//! # Architecture
//!
//! ```text
//! ┌─────────────────────────────────────────────────────────────┐
//! │                     Static Heap Buffer                       │
//! │                        (1MB default)                         │
//! │                                                               │
//! │   ┌─────────────────────────────────────────────────────┐   │
//! │   │             linked_list_allocator::Heap             │   │
//! │   │                                                      │   │
//! │   │   Free List: [block] -> [block] -> [block] -> ...   │   │
//! │   │                                                      │   │
//! │   └─────────────────────────────────────────────────────┘   │
//! └─────────────────────────────────────────────────────────────┘
//! ```
//!
//! # Usage
//!
//! ```ignore
//! // In bare_metal_main, before any allocations:
//! unsafe { crate::alloc::init_heap(); }
//!
//! // Now Vec, Box, String all work:
//! let v = vec![1, 2, 3];
//! let s = String::from("hello");
//! ```
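//!
//! Once initialized, heap pressure can be inspected at runtime (a small
//! sketch using this module's [`heap_stats`] and [`HeapStats::usage_percent`]):
//!
//! ```ignore
//! let stats = crate::alloc::heap_stats();
//! if stats.usage_percent() > 90 {
//!     // e.g. log a warning over serial before an OOM turns into a panic
//! }
//! ```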
//!
//! # Feature Flags
//!
//! - `post_ebs_allocator`: Enable `#[global_allocator]` attribute.
//!   Only enable this when running standalone post-EBS, not when used
//!   as a library by the bootloader (which has its own UEFI allocator).
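//!
//! A sketch of what this gating amounts to (hypothetical wiring; the actual
//! `cfg` on the `GLOBAL` static below is the source of truth):
//!
//! ```ignore
//! #[cfg_attr(feature = "post_ebs_allocator", global_allocator)]
//! static GLOBAL: LockedHeap = LockedHeap::empty();
//! ```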
//!
//! # Safety
//!
//! - `init_heap()` must be called before any allocations (repeat calls are no-ops)
//! - Must be called after ExitBootServices (UEFI allocator is gone)
//! - Thread-safety: Uses spin lock internally (safe for single-core post-EBS)

use core::alloc::{GlobalAlloc, Layout};
use core::ptr::NonNull;
use linked_list_allocator::Heap;

/// Heap size: 1MB - sufficient for FAT32 ops, manifest handling, etc.
/// Can be increased if needed.
const HEAP_SIZE: usize = 1024 * 1024;

/// Page-aligned heap buffer wrapper
#[repr(C, align(4096))]
struct AlignedHeapBuffer([u8; HEAP_SIZE]);

/// Static heap buffer - lives in .bss, zero-initialized
static mut HEAP_BUFFER: AlignedHeapBuffer = AlignedHeapBuffer([0u8; HEAP_SIZE]);

/// Locked heap wrapper implementing GlobalAlloc
pub struct LockedHeap {
    inner: spin::Mutex<Heap>,
}

impl LockedHeap {
    /// Create an empty (uninitialized) heap
    pub const fn empty() -> Self {
        Self {
            inner: spin::Mutex::new(Heap::empty()),
        }
    }

    /// Initialize the heap with a memory region
    ///
    /// # Safety
    /// - Must be called exactly once
    /// - Memory region must be valid and not used elsewhere
    pub unsafe fn init(&self, start: *mut u8, size: usize) {
        self.inner.lock().init(start, size);
    }
}

unsafe impl GlobalAlloc for LockedHeap {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.inner
            .lock()
            .allocate_first_fit(layout)
            .map(|nn| nn.as_ptr())
            .unwrap_or(core::ptr::null_mut())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        if let Some(nn) = NonNull::new(ptr) {
            self.inner.lock().deallocate(nn, layout);
        }
    }
}

// Only use custom allocator in actual no_std builds, not host tests
#[cfg(not(test))]
#[global_allocator]
static GLOBAL: LockedHeap = LockedHeap::empty();

// For test builds, we still need the static but it's not the global allocator
#[cfg(test)]
static GLOBAL: LockedHeap = LockedHeap::empty();

/// Track if heap is already initialized
static mut HEAP_INITIALIZED: bool = false;

/// Initialize the heap allocator
///
/// Safe to call multiple times - only the first call initializes the heap.
/// Should be called as early as possible in the post-EBS entry point
/// (e.g. at the start of `bare_metal_main`).
///
/// # Safety
/// - Must be called BEFORE any allocations (Vec, Box, String, etc.)
/// - Thread-safety: Uses static bool guard, safe for single-core
pub unsafe fn init_heap() {
    if HEAP_INITIALIZED {
        return; // Already initialized
    }
    // Use raw pointer to avoid creating mutable reference to static
    let heap_start = (&raw mut HEAP_BUFFER).cast::<u8>();
    let heap_size = HEAP_SIZE;
    GLOBAL.init(heap_start, heap_size);
    HEAP_INITIALIZED = true;
}

/// Check if heap is initialized
pub fn is_initialized() -> bool {
    unsafe { HEAP_INITIALIZED }
}

/// Get heap statistics for debugging
pub fn heap_stats() -> HeapStats {
    let heap = GLOBAL.inner.lock();
    HeapStats {
        total_size: HEAP_SIZE,
        used: heap.used(),
        free: heap.free(),
    }
}

/// Heap statistics
#[derive(Debug, Clone, Copy)]
pub struct HeapStats {
    pub total_size: usize,
    pub used: usize,
    pub free: usize,
}

impl HeapStats {
    /// Get usage percentage
    pub fn usage_percent(&self) -> u8 {
        if self.total_size == 0 {
            return 0;
        }
        ((self.used * 100) / self.total_size) as u8
    }
}
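
// Host-side sanity checks: a minimal sketch, assuming tests run on the host
// where the std test harness supplies its own global allocator (the cfg(test)
// GLOBAL above is deliberately not registered as #[global_allocator]).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn alloc_and_dealloc_round_trip() {
        // Separate buffer so the test never touches the real HEAP_BUFFER.
        static mut TEST_BUF: AlignedHeapBuffer = AlignedHeapBuffer([0u8; HEAP_SIZE]);
        let heap = LockedHeap::empty();
        unsafe {
            heap.init((&raw mut TEST_BUF).cast::<u8>(), HEAP_SIZE);
            let layout = Layout::from_size_align(64, 8).unwrap();
            let ptr = heap.alloc(layout);
            assert!(!ptr.is_null(), "first-fit allocation on a fresh heap should succeed");
            heap.dealloc(ptr, layout);
        }
    }

    #[test]
    fn usage_percent_is_a_simple_ratio() {
        let stats = HeapStats { total_size: 1024, used: 512, free: 512 };
        assert_eq!(stats.usage_percent(), 50);
    }
}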