From b3c10034461830e32d9c276c8f864854e1fb21ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20L=C3=B3pez?= Date: Thu, 2 Nov 2023 15:40:14 +0100 Subject: [PATCH 1/2] fuzzing: add page allocator fuzzer MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add a fuzzer for the page allocator, which will run a series of semi-random actions like allocating, freeing, writing and reading pages. Signed-off-by: Carlos López --- fuzz/Cargo.toml | 6 ++ fuzz/fuzz_targets/page_alloc.rs | 164 ++++++++++++++++++++++++++++++++ 2 files changed, 170 insertions(+) create mode 100644 fuzz/fuzz_targets/page_alloc.rs diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml index 9282bed21..71c7e5eb1 100644 --- a/fuzz/Cargo.toml +++ b/fuzz/Cargo.toml @@ -38,3 +38,9 @@ name = "fs" path = "fuzz_targets/fs.rs" test = false doc = false + +[[bin]] +name = "page_alloc" +path = "fuzz_targets/page_alloc.rs" +test = false +doc = false diff --git a/fuzz/fuzz_targets/page_alloc.rs b/fuzz/fuzz_targets/page_alloc.rs new file mode 100644 index 000000000..4287faed6 --- /dev/null +++ b/fuzz/fuzz_targets/page_alloc.rs @@ -0,0 +1,164 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +// +// Copyright (c) 2023 SUSE LLC +// +// Author: Carlos López + +#![no_main] + +use arbitrary::Arbitrary; +use libfuzzer_sys::fuzz_target; +use std::collections::BTreeSet; +use svsm::address::VirtAddr; +use svsm::mm::alloc::{ + allocate_file_page, allocate_file_page_ref, allocate_page, allocate_pages, allocate_slab_page, + allocate_zeroed_page, free_page, get_order, TestRootMem, +}; +use svsm::types::PAGE_SIZE; + +const WRITE_BYTE: u8 = 0x66; +const POISON_BYTE: u8 = 0xfa; +const MIN_ROOT_MEM_SIZE: usize = 0x80000; +const MAX_ROOT_MEM_SIZE: usize = 0x100000; + +#[derive(Debug, Arbitrary)] +struct FuzzInput { + root_mem_size: usize, + actions: Vec, +} + +/// Actions during a fuzzing run +#[derive(Debug, Arbitrary)] +enum Action { + /// Allocate a regular page + Allocate, + /// Allocate a slab page 
+ AllocateSlab, + /// Allocate pages of higher order + AllocatePages(usize), + /// Allocate a zeroed page + AllocateZeroed, + /// Allocate a file page + AllocateFile, + /// Write data to an allocated page + WritePage(usize), + /// Read data from an allocated & initialized page + ReadPage(usize), + /// Free an allocated page + Free(usize), + /// Allocate a page ref + AllocateFilePageRef, + /// Clone a page ref, increasing its refcount + CloneFilePageRef(usize), + /// Drop a page ref, decreasing its refcount + DropFilePageRef(usize), +} + +#[inline] +fn get_idx(v: &[T], idx: usize) -> Option { + idx.checked_rem(v.len()) +} + +#[inline] +fn get_item(v: &[T], idx: usize) -> Option<&T> { + let idx = get_idx(v, idx)?; + Some(unsafe { v.get_unchecked(idx) }) +} + +#[inline] +unsafe fn fill_page(page: VirtAddr, byte: u8) { + page.as_mut_ptr::().write_bytes(byte, PAGE_SIZE) +} + +#[inline] +fn adjust_mem_size(size: usize) -> usize { + MIN_ROOT_MEM_SIZE + (size % (MAX_ROOT_MEM_SIZE - MIN_ROOT_MEM_SIZE + 1)) +} + +fuzz_target!(|inp: FuzzInput| { + let _mem = TestRootMem::setup(adjust_mem_size(inp.root_mem_size)); + + // Regular pages + let mut pages = Vec::new(); + // Initialized regular pages + let mut inited = BTreeSet::new(); + // Page refs + let mut pagerefs = Vec::new(); + + for action in inp.actions.into_iter() { + match action { + Action::Allocate => { + if let Ok(page) = allocate_page() { + pages.push(page); + } + } + Action::AllocateSlab => { + if let Ok(page) = allocate_slab_page() { + pages.push(page); + } + } + Action::AllocatePages(size) => { + if let Ok(page) = allocate_pages(get_order(size)) { + pages.push(page); + } + } + Action::AllocateZeroed => { + if let Ok(page) = allocate_zeroed_page() { + pages.push(page); + inited.insert(page); + } + } + Action::AllocateFile => { + if let Ok(page) = allocate_file_page() { + pages.push(page); + // File pages are zeroed + inited.insert(page); + } + } + Action::WritePage(idx) => { + if let Some(page) = get_item(&pages, 
idx).copied() { + unsafe { fill_page(page, WRITE_BYTE) }; + inited.insert(page); + } + } + Action::ReadPage(idx) => { + if let Some(page) = get_item(&pages, idx) { + if inited.contains(page) { + let page_off = idx % PAGE_SIZE; + let val = unsafe { page.as_ptr::().add(page_off).read_volatile() }; + assert!(val == 0 || val == WRITE_BYTE); + } + } + } + Action::AllocateFilePageRef => { + if let Ok(pageref) = allocate_file_page_ref() { + pagerefs.push(pageref); + } + } + Action::DropFilePageRef(idx) => { + if let Some(idx) = get_idx(&pagerefs, idx) { + let _ = pagerefs.swap_remove(idx); + } + } + Action::CloneFilePageRef(idx) => { + if let Some(pageref) = get_item(&pagerefs, idx) { + pagerefs.push(pageref.clone()); + } + } + Action::Free(idx) => { + if let Some(idx) = get_idx(&pages, idx) { + let page = pages.swap_remove(idx); + inited.remove(&page); + unsafe { fill_page(page, POISON_BYTE) }; + free_page(page); + } + } + } + } + + for page in pages.into_iter() { + free_page(page); + } + + pagerefs.clear(); +}); From 14c56b68b47c9c9a3d6c429d25f8586f53c27b0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20L=C3=B3pez?= Date: Tue, 7 Nov 2023 14:53:13 +0100 Subject: [PATCH 2/2] fuzzing: add SvsmAllocator fuzzer MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add a new fuzzing harness for the SvsmAllocator, which will run a series of semi-random actions like allocating, freeing and reading memory. Unfortunately this requires access to the SvsmAllocator type, so it needs to be public.
Signed-off-by: Carlos López --- fuzz/Cargo.toml | 6 ++ fuzz/fuzz_targets/alloc.rs | 158 +++++++++++++++++++++++++++++++++++++ src/mm/alloc.rs | 4 +- 3 files changed, 166 insertions(+), 2 deletions(-) create mode 100644 fuzz/fuzz_targets/alloc.rs diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml index 71c7e5eb1..023c3ae40 100644 --- a/fuzz/Cargo.toml +++ b/fuzz/Cargo.toml @@ -44,3 +44,9 @@ name = "page_alloc" path = "fuzz_targets/page_alloc.rs" test = false doc = false + +[[bin]] +name = "alloc" +path = "fuzz_targets/alloc.rs" +test = false +doc = false diff --git a/fuzz/fuzz_targets/alloc.rs b/fuzz/fuzz_targets/alloc.rs new file mode 100644 index 000000000..0cb5cb497 --- /dev/null +++ b/fuzz/fuzz_targets/alloc.rs @@ -0,0 +1,158 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +// +// Copyright (c) 2023 SUSE LLC +// +// Author: Carlos López + +#![no_main] + +use arbitrary::Arbitrary; +use core::alloc::{GlobalAlloc, Layout, LayoutError}; +use core::num::NonZeroUsize; +use libfuzzer_sys::fuzz_target; +use svsm::mm::alloc::{SvsmAllocator, TestRootMem}; + +const MIN_ROOT_MEM_SIZE: usize = 0x8000; +const MAX_ROOT_MEM_SIZE: usize = 0x100000; + +#[inline] +fn adjust_mem_size(size: usize) -> usize { + MIN_ROOT_MEM_SIZE + (size % (MAX_ROOT_MEM_SIZE - MIN_ROOT_MEM_SIZE + 1)) +} + +#[derive(Arbitrary, Debug)] +struct FuzzLayout { + size: usize, + align: usize, +} + +impl TryFrom for Layout { + type Error = LayoutError; + + fn try_from(ly: FuzzLayout) -> Result { + Self::from_size_align(ly.size, ly.align) + } +} + +/// A wrapper around SvsmAllocator that marks memory as initialized or +/// uninitialized on allocation and deallocation respectively. 
+struct PoisonAllocator { + heap: SvsmAllocator, +} + +impl PoisonAllocator { + const POISON_BYTE: u8 = 0xf7; + const WRITE_BYTE: u8 = 0x8; + + fn new() -> Self { + Self { + heap: SvsmAllocator::new(), + } + } + + unsafe fn unpoison_mem(&self, ptr: *mut u8, size: usize) { + ptr.write_bytes(Self::WRITE_BYTE, size); + } + + unsafe fn poison_mem(&self, ptr: *mut u8, size: usize) { + ptr.write_bytes(Self::POISON_BYTE, size); + } + + unsafe fn check_mem(&self, ptr: *mut u8, size: usize) { + for i in 0..size { + assert_eq!(ptr.add(i).read_volatile(), Self::WRITE_BYTE); + } + } + + unsafe fn alloc(&self, layout: Layout) -> *mut u8 { + let ptr = self.heap.alloc(layout); + if !ptr.is_null() { + self.unpoison_mem(ptr, layout.size()); + } + ptr + } + + unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { + self.check_mem(ptr, layout.size()); + self.poison_mem(ptr, layout.size()); + self.heap.dealloc(ptr, layout); + } + + unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_layout: Layout) -> *mut u8 { + self.check_mem(ptr, layout.size()); + self.poison_mem(ptr, layout.size()); + let ptr = self.heap.realloc(ptr, layout, new_layout.size()); + if !ptr.is_null() { + self.unpoison_mem(ptr, new_layout.size()); + } + ptr + } +} + +#[derive(Arbitrary, Debug)] +enum Action { + Alloc(FuzzLayout), + Free(usize), + Realloc(usize, NonZeroUsize), + Read(usize), +} + +#[derive(Arbitrary, Debug)] +struct FuzzInput { + root_mem_size: usize, + actions: Vec, +} + +fuzz_target!(|inp: FuzzInput| { + let _mem = TestRootMem::setup(adjust_mem_size(inp.root_mem_size)); + let heap = PoisonAllocator::new(); + let mut ptrs = Vec::new(); + + for action in inp.actions.into_iter() { + match action { + Action::Alloc(layout) => { + let Ok(layout) = Layout::try_from(layout) else { + continue; + }; + let ptr = unsafe { heap.alloc(layout) }; + if !ptr.is_null() { + ptrs.push((ptr, layout)); + } + } + Action::Free(idx) => { + if let Some(idx) = idx.checked_rem(ptrs.len()) { + let (ptr, layout) = 
ptrs.swap_remove(idx); + unsafe { heap.dealloc(ptr, layout) }; + } + } + Action::Read(idx) => { + if let Some(idx) = idx.checked_rem(ptrs.len()) { + let (ptr, layout) = ptrs[idx]; + unsafe { heap.check_mem(ptr, layout.size()) }; + }; + } + Action::Realloc(idx, new_size) => { + let Some(idx) = idx.checked_rem(ptrs.len()) else { + continue; + }; + + // Try to get the new layout. Alignment must be the same. + let new_size = new_size.get(); + let (ptr, layout) = ptrs.swap_remove(idx); + let Ok(new_layout) = Layout::from_size_align(new_size, layout.align()) else { + ptrs.push((ptr, layout)); + continue; + }; + + let ptr = unsafe { heap.realloc(ptr, layout, new_layout) }; + if !ptr.is_null() { + ptrs.push((ptr, new_layout)); + } + } + } + } + + for (ptr, layout) in ptrs.into_iter() { + unsafe { heap.dealloc(ptr, layout) }; + } +}); diff --git a/src/mm/alloc.rs b/src/mm/alloc.rs index fb7783cf2..da7eed153 100644 --- a/src/mm/alloc.rs +++ b/src/mm/alloc.rs @@ -1164,7 +1164,7 @@ impl Slab { static SLAB_PAGE_SLAB: SpinLock = SpinLock::new(SlabPageSlab::new()); #[derive(Debug)] -struct SvsmAllocator { +pub struct SvsmAllocator { slabs: [SpinLock; 7], } @@ -1172,7 +1172,7 @@ impl SvsmAllocator { const MIN_SLAB_SIZE: u16 = 32; const MIN_ALIGNMENT: u32 = Self::MIN_SLAB_SIZE.trailing_zeros(); - const fn new() -> Self { + pub const fn new() -> Self { Self { slabs: [ SpinLock::new(Slab::new(Self::MIN_SLAB_SIZE)),