VYPR
High severity · NVD Advisory · Published Sep 7, 2022 · Updated Apr 22, 2025

linked_list_allocator vulnerable to out-of-bound writes on `Heap` initialization and `Heap::extend`

CVE-2022-36086

Description

linked_list_allocator is an allocator usable for no_std systems. Prior to version 0.10.2, the heap initialization methods were missing a minimum size check for the given heap size argument. This could lead to out-of-bound writes when a heap was initialized with a size smaller than 3 * size_of::<usize> because of metadata write operations. This vulnerability impacts all the initialization functions on the Heap and LockedHeap types, including Heap::new, Heap::init, Heap::init_from_slice, and LockedHeap::new. It also affects multiple uses of the Heap::extend method. Version 0.10.2 contains a patch for the issue. As a workaround, ensure that the heap is only initialized with a size larger than 3 * size_of::<usize> and that the Heap::extend method is only called with sizes larger than 2 * size_of::<usize>(). Also, ensure that the total heap size is (and stays) a multiple of 2 * size_of::<usize>().

Affected packages

Versions sourced from the GitHub Security Advisory.

Package: linked_list_allocator (crates.io)
Affected versions: < 0.10.2
Patched versions: 0.10.2

Affected products

1

Patches

1
013b07586439

Merge pull request from GHSA-xg8p-34w2-j49j

https://github.com/rust-osdev/linked-list-allocator · Philipp Oppermann · Sep 6, 2022 · via GHSA
3 files changed · +284 −38
  • src/hole.rs · +153 −13 · modified
    @@ -4,7 +4,7 @@ use core::mem::{align_of, size_of};
     use core::ptr::null_mut;
     use core::ptr::NonNull;
     
    -use crate::align_up_size;
    +use crate::{align_down_size, align_up_size};
     
     use super::align_up;
     
    @@ -13,6 +13,7 @@ pub struct HoleList {
         pub(crate) first: Hole, // dummy
         pub(crate) bottom: *mut u8,
         pub(crate) top: *mut u8,
    +    pub(crate) pending_extend: u8,
     }
     
     pub(crate) struct Cursor {
    @@ -278,6 +279,7 @@ impl HoleList {
                 },
                 bottom: null_mut(),
                 top: null_mut(),
    +            pending_extend: 0,
             }
         }
     
    @@ -320,30 +322,52 @@ impl HoleList {
     
         /// Creates a `HoleList` that contains the given hole.
         ///
    +    /// The `hole_addr` pointer is automatically aligned, so the `bottom`
    +    /// field might be larger than the given `hole_addr`.
    +    ///
    +    /// The given `hole_size` must be large enough to store the required
    +    /// metadata, otherwise this function will panic. Depending on the
    +    /// alignment of the `hole_addr` pointer, the minimum size is between
    +    /// `2 * size_of::<usize>` and `3 * size_of::<usize>`.
    +    ///
    +    /// The usable size for allocations will be truncated to the nearest
    +    /// alignment of `align_of::<usize>`. Any extra bytes left at the end
    +    /// will be reclaimed once sufficient additional space is given to
    +    /// [`extend`][crate::Heap::extend].
    +    ///
         /// # Safety
         ///
         /// This function is unsafe because it creates a hole at the given `hole_addr`.
         /// This can cause undefined behavior if this address is invalid or if memory from the
         /// `[hole_addr, hole_addr+size)` range is used somewhere else.
    -    ///
    -    /// The pointer to `hole_addr` is automatically aligned.
         pub unsafe fn new(hole_addr: *mut u8, hole_size: usize) -> HoleList {
             assert_eq!(size_of::<Hole>(), Self::min_size());
    +        assert!(hole_size >= size_of::<Hole>());
     
             let aligned_hole_addr = align_up(hole_addr, align_of::<Hole>());
    +        let requested_hole_size = hole_size - ((aligned_hole_addr as usize) - (hole_addr as usize));
    +        let aligned_hole_size = align_down_size(requested_hole_size, align_of::<Hole>());
    +        assert!(aligned_hole_size >= size_of::<Hole>());
    +
             let ptr = aligned_hole_addr as *mut Hole;
             ptr.write(Hole {
    -            size: hole_size - ((aligned_hole_addr as usize) - (hole_addr as usize)),
    +            size: aligned_hole_size,
                 next: None,
             });
     
    +        assert_eq!(
    +            hole_addr.wrapping_add(hole_size),
    +            aligned_hole_addr.wrapping_add(requested_hole_size)
    +        );
    +
             HoleList {
                 first: Hole {
                     size: 0,
                     next: Some(NonNull::new_unchecked(ptr)),
                 },
                 bottom: aligned_hole_addr,
    -            top: hole_addr.wrapping_add(hole_size),
    +            top: aligned_hole_addr.wrapping_add(aligned_hole_size),
    +            pending_extend: (requested_hole_size - aligned_hole_size) as u8,
             }
         }
     
    @@ -428,6 +452,44 @@ impl HoleList {
                 })
             })
         }
    +
    +    pub(crate) unsafe fn extend(&mut self, by: usize) {
    +        assert!(!self.top.is_null(), "tried to extend an empty heap");
    +
    +        let top = self.top;
    +
    +        let dead_space = top.align_offset(align_of::<Hole>());
    +        debug_assert_eq!(
    +            0, dead_space,
    +            "dead space detected during extend: {} bytes. This means top was unaligned",
    +            dead_space
    +        );
    +
    +        debug_assert!(
    +            (self.pending_extend as usize) < Self::min_size(),
    +            "pending extend was larger than expected"
    +        );
    +
    +        // join this extend request with any pending (but not yet acted on) extension
    +        let extend_by = self.pending_extend as usize + by;
    +
    +        let minimum_extend = Self::min_size();
    +        if extend_by < minimum_extend {
    +            self.pending_extend = extend_by as u8;
    +            return;
    +        }
    +
    +        // only extend up to another valid boundary
    +        let new_hole_size = align_down_size(extend_by, align_of::<Hole>());
    +        let layout = Layout::from_size_align(new_hole_size, 1).unwrap();
    +
    +        // instantiate the hole by forcing a deallocation on the new memory
    +        self.deallocate(NonNull::new_unchecked(top as *mut u8), layout);
    +        self.top = top.add(new_hole_size);
    +
    +        // save extra bytes given to extend that weren't aligned to the hole size
    +        self.pending_extend = (extend_by - new_hole_size) as u8;
    +    }
     }
     
     unsafe fn make_hole(addr: *mut u8, size: usize) -> NonNull<Hole> {
    @@ -505,7 +567,10 @@ impl Cursor {
             // Does hole overlap node?
             assert!(
                 hole_u8.wrapping_add(hole_size) <= node_u8,
    -            "Freed node aliases existing hole! Bad free?",
    +            "Freed node ({:?}) aliases existing hole ({:?}[{}])! Bad free?",
    +            node_u8,
    +            hole_u8,
    +            hole_size,
             );
     
             // All good! Let's insert that after.
    @@ -630,10 +695,10 @@ fn deallocate(list: &mut HoleList, addr: *mut u8, size: usize) {
     
     #[cfg(test)]
     pub mod test {
    -    use crate::Heap;
    -    use core::alloc::Layout;
    -    use std::mem::MaybeUninit;
    -    use std::prelude::v1::*;
    +    use super::HoleList;
    +    use crate::{align_down_size, Heap};
    +    use core::mem::size_of;
    +    use std::{alloc::Layout, convert::TryInto, mem::MaybeUninit, prelude::v1::*, ptr::NonNull};
     
         #[repr(align(128))]
         struct Chonk<const N: usize> {
    @@ -655,8 +720,8 @@ pub mod test {
             let assumed_location = data.as_mut_ptr().cast();
     
             let heap = Heap::from_slice(data);
    -        assert!(heap.bottom() == assumed_location);
    -        assert!(heap.size() == HEAP_SIZE);
    +        assert_eq!(heap.bottom(), assumed_location);
    +        assert_eq!(heap.size(), align_down_size(HEAP_SIZE, size_of::<usize>()));
             heap
         }
     
    @@ -667,7 +732,10 @@ pub mod test {
             // This is the "dummy" node
             assert_eq!(curs.previous().size, 0);
             // This is the "full" heap
    -        assert_eq!(curs.current().size, 1000);
    +        assert_eq!(
    +            curs.current().size,
    +            align_down_size(1000, size_of::<usize>())
    +        );
             // There is no other hole
             assert!(curs.next().is_none());
         }
    @@ -678,4 +746,76 @@ pub mod test {
             let reqd = Layout::from_size_align(256, 1).unwrap();
             let _ = heap.allocate_first_fit(reqd).unwrap();
         }
    +
    +    /// Tests `HoleList::new` with the minimal allowed `hole_size`.
    +    #[test]
    +    fn hole_list_new_min_size() {
    +        // define an array of `u64` instead of `u8` for alignment
    +        static mut HEAP: [u64; 2] = [0; 2];
    +        let heap =
    +            unsafe { HoleList::new(HEAP.as_mut_ptr().cast(), 2 * core::mem::size_of::<usize>()) };
    +        assert_eq!(heap.bottom.cast(), unsafe { HEAP.as_mut_ptr() });
    +        assert_eq!(heap.top.cast(), unsafe { HEAP.as_mut_ptr().add(2) });
    +        assert_eq!(heap.first.size, 0); // dummy
    +        assert_eq!(
    +            heap.first.next,
    +            Some(NonNull::new(heap.bottom.cast())).unwrap()
    +        );
    +        assert_eq!(
    +            unsafe { &*(heap.first.next.unwrap().as_ptr()) }.size,
    +            2 * core::mem::size_of::<usize>()
    +        );
    +        assert_eq!(unsafe { &*(heap.first.next.unwrap().as_ptr()) }.next, None);
    +    }
    +
    +    /// Tests that `HoleList::new` aligns the `hole_addr` correctly and adjusts the size
    +    /// accordingly.
    +    #[test]
    +    fn hole_list_new_align() {
    +        // define an array of `u64` instead of `u8` for alignment
    +        static mut HEAP: [u64; 3] = [0; 3];
    +
    +        let heap_start: *mut u8 = unsafe { HEAP.as_mut_ptr().add(1) }.cast();
    +        // initialize the HoleList with a hole_addr one byte before `heap_start`
    +        // -> the function should align it up to `heap_start`
    +        let heap =
    +            unsafe { HoleList::new(heap_start.sub(1), 2 * core::mem::size_of::<usize>() + 1) };
    +        assert_eq!(heap.bottom, heap_start);
    +        assert_eq!(heap.top.cast(), unsafe {
    +            // one byte less than the `hole_size` given to `new` because of alignment
    +            heap_start.add(2 * core::mem::size_of::<usize>())
    +        });
    +
    +        assert_eq!(heap.first.size, 0); // dummy
    +        assert_eq!(
    +            heap.first.next,
    +            Some(NonNull::new(heap.bottom.cast())).unwrap()
    +        );
    +        assert_eq!(
    +            unsafe { &*(heap.first.next.unwrap().as_ptr()) }.size,
    +            unsafe { heap.top.offset_from(heap.bottom) }
    +                .try_into()
    +                .unwrap()
    +        );
    +        assert_eq!(unsafe { &*(heap.first.next.unwrap().as_ptr()) }.next, None);
    +    }
    +
    +    #[test]
    +    #[should_panic]
    +    fn hole_list_new_too_small() {
    +        // define an array of `u64` instead of `u8` for alignment
    +        static mut HEAP: [u64; 3] = [0; 3];
    +
    +        let heap_start: *mut u8 = unsafe { HEAP.as_mut_ptr().add(1) }.cast();
    +        // initialize the HoleList with a hole_addr one byte before `heap_start`
    +        // -> the function should align it up to `heap_start`, but then the
    +        // available size is too small to store a hole -> it should panic
    +        unsafe { HoleList::new(heap_start.sub(1), 2 * core::mem::size_of::<usize>()) };
    +    }
    +
    +    #[test]
    +    #[should_panic]
    +    fn extend_empty() {
    +        unsafe { HoleList::empty().extend(16) };
    +    }
     }
    
  • src/lib.rs · +77 −19 · modified
    @@ -59,6 +59,19 @@ impl Heap {
     
         /// Initializes an empty heap
         ///
    +    /// The `heap_bottom` pointer is automatically aligned, so the [`bottom()`][Self::bottom]
    +    /// method might return a pointer that is larger than `heap_bottom` after construction.
    +    ///
    +    /// The given `heap_size` must be large enough to store the required
    +    /// metadata, otherwise this function will panic. Depending on the
    +    /// alignment of the `hole_addr` pointer, the minimum size is between
    +    /// `2 * size_of::<usize>` and `3 * size_of::<usize>`.
    +    ///
    +    /// The usable size for allocations will be truncated to the nearest
    +    /// alignment of `align_of::<usize>`. Any extra bytes left at the end
    +    /// will be reclaimed once sufficient additional space is given to
    +    /// [`extend`][Heap::extend].
    +    ///
         /// # Safety
         ///
         /// This function must be called at most once and must only be used on an
    @@ -82,13 +95,22 @@ impl Heap {
         /// program's memory, from a mutable static, or by allocating and leaking such memory from
         /// another allocator.
         ///
    -    /// The latter method may be especially useful if the underlying allocator does not perform
    +    /// The latter approach may be especially useful if the underlying allocator does not perform
         /// deallocation (e.g. a simple bump allocator). Then the overlaid linked-list-allocator can
         /// provide memory reclamation.
         ///
    +    /// The usable size for allocations will be truncated to the nearest
    +    /// alignment of `align_of::<usize>`. Any extra bytes left at the end
    +    /// will be reclaimed once sufficient additional space is given to
    +    /// [`extend`][Heap::extend].
    +    ///
         /// # Panics
         ///
         /// This method panics if the heap is already initialized.
    +    ///
    +    /// It also panics when the length of the given `mem` slice is not large enough to
    +    /// store the required metadata. Depending on the alignment of the slice, the minimum
    +    /// size is between `2 * size_of::<usize>` and `3 * size_of::<usize>`.
         pub fn init_from_slice(&mut self, mem: &'static mut [MaybeUninit<u8>]) {
             assert!(
                 self.bottom().is_null(),
    @@ -106,6 +128,19 @@ impl Heap {
     
         /// Creates a new heap with the given `bottom` and `size`.
         ///
    +    /// The `heap_bottom` pointer is automatically aligned, so the [`bottom()`][Self::bottom]
    +    /// method might return a pointer that is larger than `heap_bottom` after construction.
    +    ///
    +    /// The given `heap_size` must be large enough to store the required
    +    /// metadata, otherwise this function will panic. Depending on the
    +    /// alignment of the `hole_addr` pointer, the minimum size is between
    +    /// `2 * size_of::<usize>` and `3 * size_of::<usize>`.
    +    ///
    +    /// The usable size for allocations will be truncated to the nearest
    +    /// alignment of `align_of::<usize>`. Any extra bytes left at the end
    +    /// will be reclaimed once sufficient additional space is given to
    +    /// [`extend`][Heap::extend].
    +    ///
         /// # Safety
         ///
         /// The bottom address must be valid and the memory in the
    @@ -115,20 +150,17 @@ impl Heap {
         ///
         /// The provided memory range must be valid for the `'static` lifetime.
         pub unsafe fn new(heap_bottom: *mut u8, heap_size: usize) -> Heap {
    -        if heap_size < HoleList::min_size() {
    -            Self::empty()
    -        } else {
    -            Heap {
    -                used: 0,
    -                holes: HoleList::new(heap_bottom, heap_size),
    -            }
    +        Heap {
    +            used: 0,
    +            holes: HoleList::new(heap_bottom, heap_size),
             }
         }
     
         /// Creates a new heap from a slice of raw memory.
         ///
    -    /// This has the same effect as [`init_from_slice`] on an empty heap, but it is combined into a
    -    /// single operation that can not panic.
    +    /// This is a convenience function that has the same effect as calling
    +    /// [`init_from_slice`] on an empty heap. All the requirements of `init_from_slice`
    +    /// apply to this function as well.
         pub fn from_slice(mem: &'static mut [MaybeUninit<u8>]) -> Heap {
             let size = mem.len();
             let address = mem.as_mut_ptr().cast();
    @@ -172,18 +204,29 @@ impl Heap {
         }
     
         /// Returns the bottom address of the heap.
    +    ///
    +    /// The bottom pointer is automatically aligned, so the returned pointer
    +    /// might be larger than the bottom pointer used for initialization.
         pub fn bottom(&self) -> *mut u8 {
             self.holes.bottom
         }
     
         /// Returns the size of the heap.
    +    ///
    +    /// This is the size the heap is using for allocations, not necessarily the
    +    /// total amount of bytes given to the heap. To determine the exact memory
    +    /// boundaries, use [`bottom`][Self::bottom] and [`top`][Self::top].
         pub fn size(&self) -> usize {
    -        (self.top() as usize) - (self.bottom() as usize)
    +        unsafe { self.holes.top.offset_from(self.holes.bottom) as usize }
         }
     
    -    /// Return the top address of the heap
    +    /// Return the top address of the heap.
    +    ///
    +    /// Note: The heap may choose to not use bytes at the end for allocations
    +    /// until there is enough room for metadata, but it still retains ownership
    +    /// over memory from [`bottom`][Self::bottom] to the address returned.
         pub fn top(&self) -> *mut u8 {
    -        self.holes.top
    +        unsafe { self.holes.top.add(self.holes.pending_extend as usize) }
         }
     
         /// Returns the size of the used part of the heap
    @@ -196,19 +239,26 @@ impl Heap {
             self.size() - self.used
         }
     
    -    /// Extends the size of the heap by creating a new hole at the end
    +    /// Extends the size of the heap by creating a new hole at the end.
    +    ///
    +    /// Small extensions are not guaranteed to grow the usable size of
    +    /// the heap. In order to grow the Heap most effectively, extend by
    +    /// at least `2 * size_of::<usize>`, keeping the amount a multiple of
    +    /// `size_of::<usize>`.
    +    ///
    +    /// Calling this method on an uninitialized Heap will panic.
         ///
         /// # Safety
         ///
         /// The amount of data given in `by` MUST exist directly after the original
         /// range of data provided when constructing the [Heap]. The additional data
         /// must have the same lifetime of the original range of data.
    +    ///
    +    /// Even if this operation doesn't increase the [usable size][`Self::size`]
    +    /// by exactly `by` bytes, those bytes are still owned by the Heap for
    +    /// later use.
         pub unsafe fn extend(&mut self, by: usize) {
    -        let top = self.top();
    -        let layout = Layout::from_size_align(by, 1).unwrap();
    -        self.holes
    -            .deallocate(NonNull::new_unchecked(top as *mut u8), layout);
    -        self.holes.top = self.holes.top.add(by);
    +        self.holes.extend(by);
         }
     }
     
    @@ -250,6 +300,14 @@ impl LockedHeap {
     
         /// Creates a new heap with the given `bottom` and `size`.
         ///
    +    /// The `heap_bottom` pointer is automatically aligned, so the [`bottom()`][Heap::bottom]
    +    /// method might return a pointer that is larger than `heap_bottom` after construction.
    +    ///
    +    /// The given `heap_size` must be large enough to store the required
    +    /// metadata, otherwise this function will panic. Depending on the
    +    /// alignment of the `hole_addr` pointer, the minimum size is between
    +    /// `2 * size_of::<usize>` and `3 * size_of::<usize>`.
    +    ///
         /// # Safety
         ///
         /// The bottom address must be valid and the memory in the
    
  • src/test.rs · +54 −6 · modified
    @@ -23,8 +23,8 @@ fn new_heap() -> Heap {
         let assumed_location = data.as_mut_ptr().cast();
     
         let heap = Heap::from_slice(data);
    -    assert!(heap.bottom() == assumed_location);
    -    assert!(heap.size() == HEAP_SIZE);
    +    assert_eq!(heap.bottom(), assumed_location);
    +    assert_eq!(heap.size(), align_down_size(HEAP_SIZE, size_of::<usize>()));
         heap
     }
     
    @@ -37,8 +37,8 @@ fn new_max_heap() -> Heap {
     
         // Unsafe so that we have provenance over the whole allocation.
         let heap = unsafe { Heap::new(start_ptr, HEAP_SIZE) };
    -    assert!(heap.bottom() == start_ptr);
    -    assert!(heap.size() == HEAP_SIZE);
    +    assert_eq!(heap.bottom(), start_ptr);
    +    assert_eq!(heap.size(), HEAP_SIZE);
         heap
     }
     
    @@ -196,11 +196,12 @@ fn allocate_many_size_aligns() {
         const STRATS: Range<usize> = 0..4;
     
         let mut heap = new_heap();
    -    assert_eq!(heap.size(), 1000);
    +    let aligned_heap_size = align_down_size(1000, size_of::<usize>());
    +    assert_eq!(heap.size(), aligned_heap_size);
     
         heap.holes.debug();
     
    -    let max_alloc = Layout::from_size_align(1000, 1).unwrap();
    +    let max_alloc = Layout::from_size_align(aligned_heap_size, 1).unwrap();
         let full = heap.allocate_first_fit(max_alloc).unwrap();
         unsafe {
             heap.deallocate(full, max_alloc);
    @@ -495,3 +496,50 @@ fn extend_fragmented_heap() {
         // Try to allocate there
         assert!(heap.allocate_first_fit(layout_2.clone()).is_ok());
     }
    +
    +/// Ensures that `Heap::extend` fails for very small sizes.
    +///
    +/// The size needs to be big enough to hold a hole, otherwise
    +/// the hole write would result in an out of bounds write.
    +#[test]
    +fn small_heap_extension() {
    +    // define an array of `u64` instead of `u8` for alignment
    +    static mut HEAP: [u64; 5] = [0; 5];
    +    unsafe {
    +        let mut heap = Heap::new(HEAP.as_mut_ptr().cast(), 32);
    +        heap.extend(1);
    +        assert_eq!(1, heap.holes.pending_extend);
    +    }
    +}
    +
    +/// Ensures that `Heap::extend` fails for sizes that are not a multiple of the hole size.
    +#[test]
    +fn oddly_sized_heap_extension() {
    +    // define an array of `u64` instead of `u8` for alignment
    +    static mut HEAP: [u64; 5] = [0; 5];
    +    unsafe {
    +        let mut heap = Heap::new(HEAP.as_mut_ptr().cast(), 16);
    +        heap.extend(17);
    +        assert_eq!(1, heap.holes.pending_extend);
    +        assert_eq!(16 + 16, heap.size());
    +    }
    +}
    +
    +/// Ensures that heap extension fails when trying to extend an oddly-sized heap.
    +///
    +/// To extend the heap, we need to place a hole at the old top of the heap. This
    +/// only works if the top pointer is sufficiently aligned.
    +#[test]
    +fn extend_odd_size() {
    +    // define an array of `u64` instead of `u8` for alignment
    +    static mut HEAP: [u64; 5] = [0; 5];
    +    unsafe {
    +        let mut heap = Heap::new(HEAP.as_mut_ptr().cast(), 17);
    +        assert_eq!(1, heap.holes.pending_extend);
    +        heap.extend(16);
    +        assert_eq!(1, heap.holes.pending_extend);
    +        heap.extend(15);
    +        assert_eq!(0, heap.holes.pending_extend);
    +        assert_eq!(17 + 16 + 15, heap.size());
    +    }
    +}
    

Vulnerability mechanics

Generated by null/stub on May 9, 2026. Inputs: CWE entries + fix-commit diffs from this CVE's patches. Citations validated against bundle.

References

5

News mentions

0

No linked articles in our index yet.