Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fuzzing: add allocator harnesses #148

Merged
merged 2 commits into from
Nov 20, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions fuzz/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -38,3 +38,15 @@ name = "fs"
path = "fuzz_targets/fs.rs"
test = false
doc = false

# Fuzz harness exercising the page allocator (allocate_page & friends).
[[bin]]
name = "page_alloc"
path = "fuzz_targets/page_alloc.rs"
test = false
doc = false

# Fuzz harness exercising the general-purpose heap allocator (SvsmAllocator).
[[bin]]
name = "alloc"
path = "fuzz_targets/alloc.rs"
test = false
doc = false
158 changes: 158 additions & 0 deletions fuzz/fuzz_targets/alloc.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,158 @@
// SPDX-License-Identifier: MIT OR Apache-2.0
//
// Copyright (c) 2023 SUSE LLC
//
// Author: Carlos López <[email protected]>

#![no_main]

use arbitrary::Arbitrary;
use core::alloc::{GlobalAlloc, Layout, LayoutError};
use core::num::NonZeroUsize;
use libfuzzer_sys::fuzz_target;
use svsm::mm::alloc::{SvsmAllocator, TestRootMem};

/// Smallest backing memory size handed to `TestRootMem::setup()`.
const MIN_ROOT_MEM_SIZE: usize = 0x8000;
/// Largest backing memory size handed to `TestRootMem::setup()`.
const MAX_ROOT_MEM_SIZE: usize = 0x100000;

/// Maps an arbitrary fuzzer-chosen size into the inclusive range
/// `[MIN_ROOT_MEM_SIZE, MAX_ROOT_MEM_SIZE]`.
#[inline]
fn adjust_mem_size(size: usize) -> usize {
    let span = MAX_ROOT_MEM_SIZE - MIN_ROOT_MEM_SIZE + 1;
    MIN_ROOT_MEM_SIZE + size % span
}

/// An arbitrary, fuzzer-chosen (size, alignment) pair. It may be
/// invalid; it is validated when converted into a [`Layout`] via the
/// `TryFrom` impl below.
#[derive(Arbitrary, Debug)]
struct FuzzLayout {
    size: usize,
    align: usize,
}

impl TryFrom<FuzzLayout> for Layout {
    type Error = LayoutError;

    /// Validates the fuzzer-provided size/alignment pair. Fails if the
    /// alignment is not a power of two or the rounded-up size would
    /// overflow `isize` (the `Layout::from_size_align` contract).
    fn try_from(ly: FuzzLayout) -> Result<Self, Self::Error> {
        Self::from_size_align(ly.size, ly.align)
    }
}

/// A wrapper around SvsmAllocator that marks memory as initialized or
/// uninitialized on allocation and deallocation respectively.
struct PoisonAllocator {
    // The allocator under test.
    heap: SvsmAllocator,
}

impl PoisonAllocator {
    /// Byte pattern written over memory when it is freed.
    const POISON_BYTE: u8 = 0xf7;
    /// Byte pattern written over memory when it is allocated.
    const WRITE_BYTE: u8 = 0x8;

    fn new() -> Self {
        Self {
            heap: SvsmAllocator::new(),
        }
    }

    /// Marks `size` bytes at `ptr` as initialized by filling them with
    /// `WRITE_BYTE`.
    ///
    /// # Safety
    ///
    /// `ptr` must be valid for writes of `size` bytes.
    unsafe fn unpoison_mem(&self, ptr: *mut u8, size: usize) {
        ptr.write_bytes(Self::WRITE_BYTE, size);
    }

    /// Marks `size` bytes at `ptr` as uninitialized by filling them with
    /// `POISON_BYTE`.
    ///
    /// # Safety
    ///
    /// `ptr` must be valid for writes of `size` bytes.
    unsafe fn poison_mem(&self, ptr: *mut u8, size: usize) {
        ptr.write_bytes(Self::POISON_BYTE, size);
    }

    /// Asserts that every byte of the region still holds `WRITE_BYTE`,
    /// i.e. the allocator has not clobbered a live allocation.
    ///
    /// # Safety
    ///
    /// `ptr` must be valid for reads of `size` bytes.
    unsafe fn check_mem(&self, ptr: *mut u8, size: usize) {
        for i in 0..size {
            assert_eq!(ptr.add(i).read_volatile(), Self::WRITE_BYTE);
        }
    }

    /// Allocates via the wrapped allocator and unpoisons the new region
    /// on success (non-null return).
    ///
    /// # Safety
    ///
    /// Same contract as `GlobalAlloc::alloc`.
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let ptr = self.heap.alloc(layout);
        if !ptr.is_null() {
            self.unpoison_mem(ptr, layout.size());
        }
        ptr
    }

    /// Verifies and poisons the region, then frees it.
    ///
    /// # Safety
    ///
    /// Same contract as `GlobalAlloc::dealloc`: `ptr` must have been
    /// allocated by this allocator with `layout`.
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.check_mem(ptr, layout.size());
        self.poison_mem(ptr, layout.size());
        self.heap.dealloc(ptr, layout);
    }

    /// Verifies and poisons the old region, reallocates, and unpoisons
    /// the new region on success.
    ///
    /// # Safety
    ///
    /// Same contract as `GlobalAlloc::realloc`.
    ///
    /// NOTE(review): when the underlying realloc fails (null return),
    /// the old block is still allocated per the `GlobalAlloc::realloc`
    /// contract, but it has already been poisoned here and the caller in
    /// this harness drops it — i.e. it leaks. Confirm this is acceptable
    /// for the fuzzer.
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_layout: Layout) -> *mut u8 {
        self.check_mem(ptr, layout.size());
        self.poison_mem(ptr, layout.size());
        let ptr = self.heap.realloc(ptr, layout, new_layout.size());
        if !ptr.is_null() {
            self.unpoison_mem(ptr, new_layout.size());
        }
        ptr
    }
}

/// Heap operations performed during a fuzzing run. Index payloads are
/// reduced modulo the number of live allocations, so any `usize` is a
/// valid input.
#[derive(Arbitrary, Debug)]
enum Action {
    /// Allocate a block with the given (unvalidated) layout.
    Alloc(FuzzLayout),
    /// Free the allocation selected by the index.
    Free(usize),
    /// Reallocate the selected allocation to a new nonzero size,
    /// keeping its alignment.
    Realloc(usize, NonZeroUsize),
    /// Verify the contents of the selected allocation.
    Read(usize),
}

/// Top-level input for a single fuzzing run.
#[derive(Arbitrary, Debug)]
struct FuzzInput {
    // Requested root memory size; clamped by adjust_mem_size().
    root_mem_size: usize,
    // Sequence of heap operations to perform.
    actions: Vec<Action>,
}

fuzz_target!(|inp: FuzzInput| {
    let _mem = TestRootMem::setup(adjust_mem_size(inp.root_mem_size));
    let heap = PoisonAllocator::new();
    // Every live allocation, tracked as a (pointer, layout) pair.
    let mut live: Vec<(*mut u8, Layout)> = Vec::new();

    for action in inp.actions {
        match action {
            Action::Alloc(layout) => {
                // Reject invalid size/alignment combinations up front.
                if let Ok(layout) = Layout::try_from(layout) {
                    let ptr = unsafe { heap.alloc(layout) };
                    if !ptr.is_null() {
                        live.push((ptr, layout));
                    }
                }
            }
            Action::Free(idx) => {
                // checked_rem() yields None when nothing is allocated.
                if let Some(idx) = idx.checked_rem(live.len()) {
                    let (ptr, layout) = live.swap_remove(idx);
                    unsafe { heap.dealloc(ptr, layout) };
                }
            }
            Action::Read(idx) => {
                if let Some(idx) = idx.checked_rem(live.len()) {
                    let (ptr, layout) = live[idx];
                    unsafe { heap.check_mem(ptr, layout.size()) };
                }
            }
            Action::Realloc(idx, new_size) => {
                let Some(idx) = idx.checked_rem(live.len()) else {
                    continue;
                };

                // A resize keeps the original alignment; only the size
                // of the layout may change.
                let (ptr, layout) = live.swap_remove(idx);
                match Layout::from_size_align(new_size.get(), layout.align()) {
                    Ok(new_layout) => {
                        let new_ptr = unsafe { heap.realloc(ptr, layout, new_layout) };
                        if !new_ptr.is_null() {
                            live.push((new_ptr, new_layout));
                        }
                    }
                    // Invalid target layout: keep the old allocation.
                    Err(_) => live.push((ptr, layout)),
                }
            }
        }
    }

    // Release everything that is still allocated.
    for (ptr, layout) in live {
        unsafe { heap.dealloc(ptr, layout) };
    }
});
164 changes: 164 additions & 0 deletions fuzz/fuzz_targets/page_alloc.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,164 @@
// SPDX-License-Identifier: MIT OR Apache-2.0
//
// Copyright (c) 2023 SUSE LLC
//
// Author: Carlos López <[email protected]>

#![no_main]

use arbitrary::Arbitrary;
use libfuzzer_sys::fuzz_target;
use std::collections::BTreeSet;
use svsm::address::VirtAddr;
use svsm::mm::alloc::{
allocate_file_page, allocate_file_page_ref, allocate_page, allocate_pages, allocate_slab_page,
allocate_zeroed_page, free_page, get_order, TestRootMem,
};
use svsm::types::PAGE_SIZE;

// Byte pattern written to pages to mark them as initialized.
const WRITE_BYTE: u8 = 0x66;
// Byte pattern scribbled over pages just before they are freed.
const POISON_BYTE: u8 = 0xfa;
// Lower bound for the memory handed to TestRootMem::setup().
const MIN_ROOT_MEM_SIZE: usize = 0x80000;
// Upper bound for the memory handed to TestRootMem::setup().
const MAX_ROOT_MEM_SIZE: usize = 0x100000;

/// Top-level input for a single fuzzing run.
#[derive(Debug, Arbitrary)]
struct FuzzInput {
    // Requested root memory size; clamped by adjust_mem_size().
    root_mem_size: usize,
    // Sequence of page-allocator operations to perform.
    actions: Vec<Action>,
}

/// Actions during a fuzzing run
///
/// Index payloads are reduced modulo the number of live items (see
/// `get_idx()`/`get_item()`), so any `usize` is a valid input.
#[derive(Debug, Arbitrary)]
enum Action {
    /// Allocate a regular page
    Allocate,
    /// Allocate a slab page
    AllocateSlab,
    /// Allocate pages of higher order
    AllocatePages(usize),
    /// Allocate a zeroed page
    AllocateZeroed,
    /// Allocate a file page
    AllocateFile,
    /// Write data to an allocated page
    WritePage(usize),
    /// Read data from an allocated & initialized page
    ReadPage(usize),
    /// Free an allocated page
    Free(usize),
    /// Allocate a page ref
    AllocateFilePageRef,
    /// Clone a page ref, increasing its refcount
    CloneFilePageRef(usize),
    /// Drop a page ref, decreasing its refcount
    DropFilePageRef(usize),
}

/// Reduces an arbitrary index modulo the slice length, yielding a valid
/// index into `v`, or `None` when the slice is empty.
#[inline]
fn get_idx<T>(v: &[T], idx: usize) -> Option<usize> {
    if v.is_empty() {
        None
    } else {
        Some(idx % v.len())
    }
}

/// Returns a reference to the element of `v` selected by reducing `idx`
/// modulo the slice length, or `None` when `v` is empty.
///
/// `checked_rem()` returns `None` for an empty slice and otherwise an
/// index strictly less than `v.len()`, so the checked `slice::get` can
/// never fail here. Using it instead of `get_unchecked` removes an
/// unnecessary `unsafe` block at no cost (the bounds check is trivially
/// eliminated).
#[inline]
fn get_item<T>(v: &[T], idx: usize) -> Option<&T> {
    v.get(idx.checked_rem(v.len())?)
}

/// Fills an entire page with `byte`.
///
/// # Safety
///
/// `page` must point to a valid, writable mapping of at least
/// `PAGE_SIZE` bytes.
#[inline]
unsafe fn fill_page(page: VirtAddr, byte: u8) {
    page.as_mut_ptr::<u8>().write_bytes(byte, PAGE_SIZE)
}

/// Maps an arbitrary fuzzer-chosen size into the inclusive range
/// `[MIN_ROOT_MEM_SIZE, MAX_ROOT_MEM_SIZE]`.
#[inline]
fn adjust_mem_size(size: usize) -> usize {
    let span = MAX_ROOT_MEM_SIZE - MIN_ROOT_MEM_SIZE + 1;
    MIN_ROOT_MEM_SIZE + size % span
}

fuzz_target!(|inp: FuzzInput| {
    let _mem = TestRootMem::setup(adjust_mem_size(inp.root_mem_size));

    // Every page currently allocated.
    let mut pages = Vec::new();
    // Subset of `pages` whose contents are known (zeroed or written).
    let mut inited = BTreeSet::new();
    // Live file page references.
    let mut pagerefs = Vec::new();

    for action in inp.actions {
        match action {
            Action::Allocate => {
                if let Ok(page) = allocate_page() {
                    pages.push(page);
                }
            }
            Action::AllocateSlab => {
                if let Ok(page) = allocate_slab_page() {
                    pages.push(page);
                }
            }
            Action::AllocatePages(size) => {
                if let Ok(start) = allocate_pages(get_order(size)) {
                    pages.push(start);
                }
            }
            Action::AllocateZeroed => {
                if let Ok(page) = allocate_zeroed_page() {
                    pages.push(page);
                    // Contents are known to be zero from the start.
                    inited.insert(page);
                }
            }
            Action::AllocateFile => {
                if let Ok(page) = allocate_file_page() {
                    pages.push(page);
                    // File pages are zeroed
                    inited.insert(page);
                }
            }
            Action::WritePage(idx) => {
                if let Some(&page) = get_item(&pages, idx) {
                    unsafe { fill_page(page, WRITE_BYTE) };
                    inited.insert(page);
                }
            }
            Action::ReadPage(idx) => {
                if let Some(&page) = get_item(&pages, idx) {
                    if inited.contains(&page) {
                        // Probe a single pseudo-random offset in the page.
                        let off = idx % PAGE_SIZE;
                        let val = unsafe { page.as_ptr::<u8>().add(off).read_volatile() };
                        assert!(val == 0 || val == WRITE_BYTE);
                    }
                }
            }
            Action::AllocateFilePageRef => {
                if let Ok(pageref) = allocate_file_page_ref() {
                    pagerefs.push(pageref);
                }
            }
            Action::DropFilePageRef(idx) => {
                if let Some(idx) = get_idx(&pagerefs, idx) {
                    drop(pagerefs.swap_remove(idx));
                }
            }
            Action::CloneFilePageRef(idx) => {
                if let Some(pageref) = get_item(&pagerefs, idx) {
                    pagerefs.push(pageref.clone());
                }
            }
            Action::Free(idx) => {
                if let Some(idx) = get_idx(&pages, idx) {
                    let page = pages.swap_remove(idx);
                    inited.remove(&page);
                    // Scribble over the page so stale reads are loud.
                    unsafe { fill_page(page, POISON_BYTE) };
                    free_page(page);
                }
            }
        }
    }

    // Release everything that is still live.
    for page in pages {
        free_page(page);
    }

    pagerefs.clear();
});
4 changes: 2 additions & 2 deletions src/mm/alloc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1164,15 +1164,15 @@ impl Slab {
static SLAB_PAGE_SLAB: SpinLock<SlabPageSlab> = SpinLock::new(SlabPageSlab::new());

#[derive(Debug)]
struct SvsmAllocator {
pub struct SvsmAllocator {
slabs: [SpinLock<Slab>; 7],
}

impl SvsmAllocator {
const MIN_SLAB_SIZE: u16 = 32;
const MIN_ALIGNMENT: u32 = Self::MIN_SLAB_SIZE.trailing_zeros();

const fn new() -> Self {
pub const fn new() -> Self {
Self {
slabs: [
SpinLock::new(Slab::new(Self::MIN_SLAB_SIZE)),
Expand Down
Loading