From 29c3333d96bf2f1c0a851b1970ac5371e8065540 Mon Sep 17 00:00:00 2001
From: Jalon Wong
Date: Fri, 15 Aug 2025 12:29:26 -0500
Subject: [PATCH] Add the simplest heap

---
 .github/workflows/ci.yml              |   1 +
 Cargo.toml                            |   8 +-
 examples/global_alloc.rs              |   1 +
 examples/simplest_integration_test.rs |  88 +++++++++++++++++
 src/lib.rs                            |   4 +
 src/simplest.rs                       | 135 ++++++++++++++++++++++++++
 6 files changed, 236 insertions(+), 1 deletion(-)
 create mode 100644 examples/simplest_integration_test.rs
 create mode 100644 src/simplest.rs

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index cc72680..c474a4b 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -58,6 +58,7 @@ jobs:
       - run: qemu-system-arm --version
       - run: cargo run --target thumbv7m-none-eabi --example llff_integration_test --all-features
       - run: cargo run --target thumbv7m-none-eabi --example tlsf_integration_test --all-features
+      - run: cargo run --target thumbv7m-none-eabi --example simplest_integration_test --all-features
 
   clippy:
     name: Clippy
diff --git a/Cargo.toml b/Cargo.toml
index bcf197a..265ac14 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -29,8 +29,10 @@ allocator_api = []
 
 # Use the Two-Level Segregated Fit allocator
 tlsf = ["rlsf", "const-default"]
- # Use the LinkedList first-fit allocator
+# Use the LinkedList first-fit allocator
 llff = ["linked_list_allocator"]
+# Use the simplest allocator
+simplest = []
 
 [dependencies]
 critical-section = "1.0"
@@ -56,6 +58,10 @@ required-features = ["allocator_api", "llff"]
 name = "tlsf_integration_test"
 required-features = ["allocator_api", "tlsf"]
 
+[[example]]
+name = "simplest_integration_test"
+required-features = ["allocator_api", "simplest"]
+
 [[example]]
 name = "global_alloc"
 required-features = ["llff"]
diff --git a/examples/global_alloc.rs b/examples/global_alloc.rs
index 81705fc..710c81a 100644
--- a/examples/global_alloc.rs
+++ b/examples/global_alloc.rs
@@ -10,6 +10,7 @@ use cortex_m_rt::entry;
 use embedded_alloc::LlffHeap as Heap;
 // Two-Level Segregated Fit Heap allocator (feature = "tlsf")
 // use embedded_alloc::TlsfHeap as Heap;
+// use embedded_alloc::SimplestHeap as Heap;
 
 #[global_allocator]
 static HEAP: Heap = Heap::empty();
diff --git a/examples/simplest_integration_test.rs b/examples/simplest_integration_test.rs
new file mode 100644
index 0000000..b5381ba
--- /dev/null
+++ b/examples/simplest_integration_test.rs
@@ -0,0 +1,88 @@
+//! This is a very basic smoke test that runs in QEMU
+//! Reference the QEMU section of the [Embedded Rust Book] for more information
+//!
+//! This only tests integration of the allocator on an embedded target.
+//! Comprehensive allocator tests are located in the allocator dependency.
+//!
+//! After toolchain installation this test can be run with:
+//!
+//! ```bash
+//! cargo +nightly run --target thumbv7m-none-eabi --example simplest_integration_test --all-features
+//! ```
+//!
+//! [Embedded Rust Book]: https://docs.rust-embedded.org/book/intro/index.html
+
+#![feature(allocator_api)]
+#![no_main]
+#![no_std]
+
+extern crate alloc;
+extern crate panic_semihosting;
+
+use alloc::vec::Vec;
+use core::mem::{size_of, MaybeUninit};
+use cortex_m_rt::entry;
+use cortex_m_semihosting::{debug, hprintln};
+use embedded_alloc::SimplestHeap as Heap;
+
+#[global_allocator]
+static HEAP: Heap = Heap::empty();
+
+fn test_global_heap() {
+    assert_eq!(HEAP.used(), 0);
+
+    let mut xs: Vec<i32> = alloc::vec![1];
+    xs.push(2);
+    xs.extend(&[3, 4]);
+
+    // do not optimize xs
+    core::hint::black_box(&mut xs);
+
+    assert_eq!(xs.as_slice(), &[1, 2, 3, 4]);
+    assert!(HEAP.used() >= size_of::<i32>() * xs.len());
+}
+
+fn test_allocator_api() {
+    // small local heap
+    const HEAP_SIZE: usize = 16;
+    let mut heap_mem: [MaybeUninit<u8>; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE];
+    let local_heap: Heap = Heap::empty();
+    unsafe { local_heap.init(&raw mut heap_mem as usize, HEAP_SIZE) }
+
+    assert_eq!(local_heap.used(), 0);
+
+    let mut v: Vec<u16, Heap> = Vec::new_in(local_heap);
+    v.push(0xCAFE);
+    v.extend(&[0xDEAD, 0xFEED]);
+
+    // do not optimize v
+    core::hint::black_box(&mut v);
+
+    assert_eq!(v.as_slice(), &[0xCAFE, 0xDEAD, 0xFEED]);
+}
+
+#[entry]
+fn main() -> ! {
+    {
+        const HEAP_SIZE: usize = 1024;
+        static mut HEAP_MEM: [MaybeUninit<u8>; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE];
+        unsafe { HEAP.init(&raw mut HEAP_MEM as usize, HEAP_SIZE) }
+    }
+
+    #[allow(clippy::type_complexity)]
+    let tests: &[(fn() -> (), &'static str)] = &[
+        (test_global_heap, "test_global_heap"),
+        (test_allocator_api, "test_allocator_api"),
+    ];
+
+    for (test_fn, test_name) in tests {
+        hprintln!("{}: start", test_name);
+        test_fn();
+        hprintln!("{}: pass", test_name);
+    }
+
+    // exit QEMU with a success status
+    debug::exit(debug::EXIT_SUCCESS);
+    #[allow(clippy::empty_loop)]
+    loop {}
+}
diff --git a/src/lib.rs b/src/lib.rs
index 4308790..6f829de 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -5,10 +5,14 @@
 
 #[cfg(feature = "llff")]
 mod llff;
+#[cfg(feature = "simplest")]
+mod simplest;
 #[cfg(feature = "tlsf")]
 mod tlsf;
 
 #[cfg(feature = "llff")]
 pub use llff::Heap as LlffHeap;
+#[cfg(feature = "simplest")]
+pub use simplest::Heap as SimplestHeap;
 #[cfg(feature = "tlsf")]
 pub use tlsf::Heap as TlsfHeap;
diff --git a/src/simplest.rs b/src/simplest.rs
new file mode 100644
index 0000000..894f662
--- /dev/null
+++ b/src/simplest.rs
@@ -0,0 +1,135 @@
+use core::alloc::{GlobalAlloc, Layout};
+use core::cell::RefCell;
+use core::ptr;
+use critical_section::Mutex;
+
+/// The simplest possible heap.
+///
+/// # Safety
+///
+/// This heap does **NOT** free allocated memory.
+pub struct Heap {
+    heap: Mutex<RefCell<SimplestHeap>>,
+}
+
+impl Heap {
+    /// Create a new UNINITIALIZED heap allocator
+    ///
+    /// You must initialize this heap using the
+    /// [`init`](Self::init) method before using the allocator.
+    pub const fn empty() -> Heap {
+        Heap {
+            heap: Mutex::new(RefCell::new(SimplestHeap::empty())),
+        }
+    }
+
+    /// Initializes the heap
+    ///
+    /// This function must be called BEFORE you run any code that makes use of the
+    /// allocator.
+    ///
+    /// `start_addr` is the address where the heap will be located.
+    ///
+    /// `size` is the size of the heap in bytes.
+    ///
+    /// # Safety
+    ///
+    /// Obey these or Bad Stuff will happen.
+    ///
+    /// - This function must be called exactly ONCE.
+    /// - `size > 0`
+    pub unsafe fn init(&self, start_addr: usize, size: usize) {
+        critical_section::with(|cs| {
+            self.heap
+                .borrow(cs)
+                .borrow_mut()
+                .init(start_addr as *mut u8, size);
+        });
+    }
+
+    /// Returns an estimate of the amount of bytes in use.
+    pub fn used(&self) -> usize {
+        critical_section::with(|cs| self.heap.borrow(cs).borrow().used())
+    }
+
+    /// Returns an estimate of the amount of bytes available.
+    pub fn free(&self) -> usize {
+        critical_section::with(|cs| self.heap.borrow(cs).borrow().free())
+    }
+}
+
+unsafe impl GlobalAlloc for Heap {
+    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+        critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().alloc(layout))
+    }
+
+    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {}
+}
+
+#[cfg(feature = "allocator_api")]
+mod allocator_api {
+    use super::*;
+    use core::alloc::{AllocError, Allocator};
+    use core::ptr::NonNull;
+
+    unsafe impl Allocator for Heap {
+        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+            match layout.size() {
+                0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
+                size => critical_section::with(|cs| {
+                    let rst = NonNull::new(self.heap.borrow(cs).borrow_mut().alloc(layout))
+                        .ok_or(AllocError)?;
+                    Ok(NonNull::slice_from_raw_parts(rst, size))
+                }),
+            }
+        }
+
+        unsafe fn deallocate(&self, _ptr: NonNull<u8>, _layout: Layout) {}
+    }
+}
+
+struct SimplestHeap {
+    arena: *mut u8,
+    remaining: usize,
+    size: usize,
+}
+
+unsafe impl Send for SimplestHeap {}
+
+impl SimplestHeap {
+    const fn empty() -> Self {
+        Self {
+            arena: ptr::null_mut(),
+            remaining: 0,
+            size: 0,
+        }
+    }
+
+    fn init(&mut self, start_addr: *mut u8, size: usize) {
+        self.arena = start_addr;
+        self.remaining = size;
+        self.size = size;
+    }
+
+    fn free(&self) -> usize {
+        self.remaining
+    }
+
+    fn used(&self) -> usize {
+        self.size - self.remaining
+    }
+
+    fn alloc(&mut self, layout: Layout) -> *mut u8 {
+        if layout.size() > self.remaining {
+            return ptr::null_mut();
+        }
+
+        // `Layout` contract forbids making a `Layout` with align=0, or align not power of 2.
+        // So we can safely use a mask to ensure alignment without worrying about UB.
+        let align_mask_to_round_down = !(layout.align() - 1);
+
+        self.remaining -= layout.size();
+        self.remaining &= align_mask_to_round_down;
+        self.arena.wrapping_add(self.remaining)
+    }
+}
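Note on the allocation strategy (not part of the patch): `SimplestHeap::alloc` is a bump-down arena allocator. It reserves `layout.size()` bytes at the top of the remaining space, rounds the new offset down to the requested alignment, and returns `arena + remaining`; `dealloc` is a no-op, so memory is never reclaimed. Below is a minimal host-side sketch of that arithmetic, using a hypothetical helper name (`bump_down`) and illustrative numbers.

```rust
/// Hypothetical stand-alone model of the pointer math in `SimplestHeap::alloc`:
/// carve `size` bytes off the top of the arena, then round the new offset
/// down to `align` (a non-zero power of two, as `Layout` guarantees).
fn bump_down(remaining: usize, size: usize, align: usize) -> Option<usize> {
    if size > remaining {
        return None; // not enough arena left; the real allocator returns null
    }
    // Rounding down can only shrink the offset, so the allocation still fits.
    Some((remaining - size) & !(align - 1))
}

fn main() {
    // 64-byte arena: a 5-byte, 4-aligned request lands at offset 56,
    // leaving 56 bytes free and 8 bytes used (5 payload + 3 alignment padding).
    assert_eq!(bump_down(64, 5, 4), Some(56));
    // An oversized request is refused.
    assert_eq!(bump_down(56, 100, 8), None);
}
```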