diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml
index 71c7e5eb12..023c3ae402 100644
--- a/fuzz/Cargo.toml
+++ b/fuzz/Cargo.toml
@@ -44,3 +44,9 @@ name = "page_alloc"
 path = "fuzz_targets/page_alloc.rs"
 test = false
 doc = false
+
+[[bin]]
+name = "alloc"
+path = "fuzz_targets/alloc.rs"
+test = false
+doc = false
diff --git a/fuzz/fuzz_targets/alloc.rs b/fuzz/fuzz_targets/alloc.rs
new file mode 100644
index 0000000000..7f5dc0f324
--- /dev/null
+++ b/fuzz/fuzz_targets/alloc.rs
@@ -0,0 +1,154 @@
+// SPDX-License-Identifier: MIT OR Apache-2.0
+
+// Copyright (c) 2023 SUSE LLC
+//
+// Author: Carlos López
+
+#![no_main]
+
+use arbitrary::Arbitrary;
+use core::alloc::{GlobalAlloc, Layout, LayoutError};
+use core::num::NonZeroUsize;
+use libfuzzer_sys::fuzz_target;
+use svsm::mm::alloc::{SvsmAllocator, TestRootMem};
+
+const MIN_ROOT_MEM_SIZE: usize = 0x8000;
+const MAX_ROOT_MEM_SIZE: usize = 0x100000;
+
+#[inline]
+fn adjust_mem_size(size: usize) -> usize {
+    MIN_ROOT_MEM_SIZE + (size % (MAX_ROOT_MEM_SIZE - MIN_ROOT_MEM_SIZE + 1))
+}
+
+#[derive(Arbitrary, Debug)]
+struct FuzzLayout {
+    size: usize,
+    align: usize,
+}
+
+impl TryFrom<FuzzLayout> for Layout {
+    type Error = LayoutError;
+
+    fn try_from(ly: FuzzLayout) -> Result<Self, Self::Error> {
+        Self::from_size_align(ly.size, ly.align)
+    }
+}
+
+/// A wrapper around SvsmAllocator that marks memory as initialized or
+/// uninitialized on allocation and deallocation respectively.
+struct PoisonAllocator {
+    heap: SvsmAllocator,
+}
+
+impl PoisonAllocator {
+    const POISON_BYTE: u8 = 0xf7;
+    const WRITE_BYTE: u8 = 0x8;
+
+    fn new() -> Self {
+        Self {
+            heap: SvsmAllocator::new(),
+        }
+    }
+
+    unsafe fn unpoison_mem(&self, ptr: *mut u8, size: usize) {
+        ptr.write_bytes(Self::WRITE_BYTE, size);
+    }
+
+    unsafe fn poison_mem(&self, ptr: *mut u8, size: usize) {
+        ptr.write_bytes(Self::POISON_BYTE, size);
+    }
+
+    unsafe fn check_mem(&self, ptr: *mut u8, size: usize) {
+        for i in 0..size {
+            assert_eq!(ptr.add(i).read_volatile(), Self::WRITE_BYTE);
+        }
+    }
+
+    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+        let ptr = self.heap.alloc(layout);
+        if !ptr.is_null() {
+            self.unpoison_mem(ptr, layout.size());
+        }
+        ptr
+    }
+
+    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+        self.check_mem(ptr, layout.size());
+        self.poison_mem(ptr, layout.size());
+        self.heap.dealloc(ptr, layout);
+    }
+
+    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_layout: Layout) -> *mut u8 {
+        self.check_mem(ptr, layout.size());
+        self.poison_mem(ptr, layout.size());
+        let ptr = self.heap.realloc(ptr, layout, new_layout.size());
+        if !ptr.is_null() {
+            self.unpoison_mem(ptr, new_layout.size());
+        }
+        ptr
+    }
+}
+
+#[derive(Arbitrary, Debug)]
+enum Action {
+    Alloc(FuzzLayout),
+    Free(usize),
+    Realloc(usize, NonZeroUsize),
+    Read(usize),
+}
+
+#[derive(Arbitrary, Debug)]
+struct FuzzInput {
+    root_mem_size: usize,
+    actions: Vec<Action>,
+}
+
+fuzz_target!(|inp: FuzzInput| {
+    let _mem = TestRootMem::setup(adjust_mem_size(inp.root_mem_size));
+    let heap = PoisonAllocator::new();
+    let mut ptrs = Vec::new();
+
+    for action in inp.actions.into_iter() {
+        match action {
+            Action::Alloc(layout) => {
+                let Ok(layout) = Layout::try_from(layout) else {
+                    continue;
+                };
+                let ptr = unsafe { heap.alloc(layout) };
+                if !ptr.is_null() {
+                    ptrs.push((ptr, layout));
+                }
+            }
+            Action::Free(idx) => {
+                if let Some(idx) = idx.checked_rem(ptrs.len()) {
+                    let (ptr, layout) = ptrs.swap_remove(idx);
+                    unsafe { heap.dealloc(ptr, layout) };
+                }
+            }
+            Action::Read(idx) => {
+                if let Some(idx) = idx.checked_rem(ptrs.len()) {
+                    let (ptr, layout) = ptrs[idx];
+                    unsafe { heap.check_mem(ptr, layout.size()) };
+                };
+            }
+            Action::Realloc(idx, new_size) => {
+                let Some(idx) = idx.checked_rem(ptrs.len()) else {
+                    continue;
+                };
+
+                // Try to get the new layout. Alignment must be the same.
+                let new_size = new_size.get();
+                let (ptr, layout) = ptrs.swap_remove(idx);
+                let Ok(new_layout) = Layout::from_size_align(new_size, layout.align()) else {
+                    ptrs.push((ptr, layout));
+                    continue;
+                };
+
+                let ptr = unsafe { heap.realloc(ptr, layout, new_layout) };
+                if !ptr.is_null() {
+                    ptrs.push((ptr, new_layout));
+                }
+            }
+        }
+    }
+});
diff --git a/src/mm/alloc.rs b/src/mm/alloc.rs
index fb7783cf28..da7eed153c 100644
--- a/src/mm/alloc.rs
+++ b/src/mm/alloc.rs
@@ -1164,7 +1164,7 @@ impl Slab {
 static SLAB_PAGE_SLAB: SpinLock<SlabPageSlab> = SpinLock::new(SlabPageSlab::new());
 
 #[derive(Debug)]
-struct SvsmAllocator {
+pub struct SvsmAllocator {
     slabs: [SpinLock<Slab>; 7],
 }
 
@@ -1172,7 +1172,7 @@ impl SvsmAllocator {
     const MIN_SLAB_SIZE: u16 = 32;
    const MIN_ALIGNMENT: u32 = Self::MIN_SLAB_SIZE.trailing_zeros();
 
-    const fn new() -> Self {
+    pub const fn new() -> Self {
         Self {
             slabs: [
                 SpinLock::new(Slab::new(Self::MIN_SLAB_SIZE)),
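
For reference, here is a minimal standalone sketch of the poison/check discipline that the new fuzz target relies on. It is not part of the patch: it uses std's System allocator instead of SvsmAllocator so it can compile and run outside the SVSM tree, and the layout values are made up for illustration; only the byte constants mirror the fuzz target above.

// Standalone sketch (assumption: std and the System allocator stand in for
// the SVSM heap purely for illustration).
use std::alloc::{GlobalAlloc, Layout, System};

const WRITE_BYTE: u8 = 0x8; // pattern written into live allocations
const POISON_BYTE: u8 = 0xf7; // pattern written into freed allocations

fn main() {
    // Hypothetical layout; the fuzz target derives size/align from arbitrary input.
    let layout = Layout::from_size_align(64, 8).unwrap();
    unsafe {
        let ptr = System.alloc(layout);
        assert!(!ptr.is_null());
        // "Unpoison": fill the fresh allocation with the known live pattern.
        ptr.write_bytes(WRITE_BYTE, layout.size());
        // "Check": every byte must still hold the live pattern; an allocator
        // bug that handed out overlapping memory would trip this assertion.
        for i in 0..layout.size() {
            assert_eq!(ptr.add(i).read_volatile(), WRITE_BYTE);
        }
        // "Poison": overwrite before freeing so stale reads become detectable.
        ptr.write_bytes(POISON_BYTE, layout.size());
        System.dealloc(ptr, layout);
    }
}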