From 753b3a4e8b58b2f8324040e6368812af5073c5a2 Mon Sep 17 00:00:00 2001
From: Bradlee Speice
Date: Sat, 10 Nov 2018 01:30:39 -0500
Subject: [PATCH] Multi-threaded allocation checker! Holy crap that was much harder than expected

---
 Cargo.toml           |   1 -
 src/lib.rs           | 120 +++++++++++++++++++++++++------------
 tests/allocations.rs |  31 +++++------
 3 files changed, 87 insertions(+), 65 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 8b19f40..cb22c33 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -9,7 +9,6 @@ categories = ["allocator", "nostd"]
 repository = "https://github.com/bspeice/qadapt.git"
 
 [dependencies]
-backtrace = "0.3"
 libc = "0.2"
 log = "0.4"
 spin = "0.4"
diff --git a/src/lib.rs b/src/lib.rs
index ccfdfd4..c0a3e75 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,80 +1,102 @@
-extern crate backtrace;
 extern crate libc;
 extern crate qadapt_macro;
-#[macro_use]
-extern crate log;
 extern crate spin;
 
 // Re-export the proc macros to use by other code
 pub use qadapt_macro::*;
 
-use backtrace::Backtrace;
 use libc::c_void;
 use libc::free;
 use libc::malloc;
-use log::Level;
+use spin::Mutex;
 use std::alloc::Layout;
 use std::alloc::GlobalAlloc;
-use spin::RwLock;
+use std::sync::RwLock;
+use std::thread;
 
-static DO_PANIC: RwLock<bool> = RwLock::new(false);
-static INTERNAL_ALLOCATION: RwLock<bool> = RwLock::new(false);
-static LOG_LEVEL: RwLock<Level> = RwLock::new(Level::Debug);
+static THREAD_LOCAL_LOCK: Mutex<()> = Mutex::new(());
+thread_local! {
+    static PROTECTION_LEVEL: RwLock<u32> = RwLock::new(0);
+}
 
 pub struct QADAPT;
 
-pub fn set_panic(b: bool) {
-    let mut val = DO_PANIC.write();
-    if *val == b {
-        let level = LOG_LEVEL.read();
-        if log_enabled!(*level) {
-            log!(*level, "Panic flag was already {}, potential data race", b)
-        }
+pub fn enter_protected() {
+    if thread::panicking() {
+        return
     }
 
-    *val = b;
+    PROTECTION_LEVEL.try_with(|v| {
+        *v.write().unwrap() += 1;
+    }).unwrap_or_else(|_e| ());
 }
 
-pub fn set_log_level(level: Level) {
-    *LOG_LEVEL.write() = level;
+pub fn exit_protected() {
+    if thread::panicking() {
+        return
+    }
+
+    PROTECTION_LEVEL.try_with(|v| {
+        let val = { *v.read().unwrap() };
+        match val {
+            v if v == 0 => panic!("Attempt to exit protected too many times"),
+            _ => {
+                *v.write().unwrap() -= 1;
+            }
+        }
+    }).unwrap_or_else(|_e| ());
 }
 
 unsafe impl GlobalAlloc for QADAPT {
     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
-        // Use a block to release the read guard
-        let should_panic = { *DO_PANIC.read() };
-
-        if should_panic && !*INTERNAL_ALLOCATION.read() {
-            // Only trip one panic at a time, don't want to cause issues on potential rewind
-            *DO_PANIC.write() = false;
-            panic!("Unexpected allocation")
-        } else if log_enabled!(*LOG_LEVEL.read()) {
-            // We wrap in a block because we need to release the write guard
-            // so allocations during `Backtrace::new()` can read
-            { *INTERNAL_ALLOCATION.write() = true; }
-
-            let bt = Backtrace::new();
-            log!(*LOG_LEVEL.read(), "Unexpected allocation:\n{:?}", bt);
-
-            *INTERNAL_ALLOCATION.write() = false;
+        // If we're attempting to allocate our PROTECTION_LEVEL thread local,
+        // just allow it through
+        if thread::panicking() || THREAD_LOCAL_LOCK.try_lock().is_none() {
+            return malloc(layout.size()) as *mut u8;
         }
 
-        malloc(layout.size()) as *mut u8
+        let protection_level: Result<u32, ()> = {
+            let _lock = THREAD_LOCAL_LOCK.lock();
+            PROTECTION_LEVEL.try_with(|v| *v.read().unwrap())
+                .or(Ok(0))
+        };
+
+        match protection_level {
+            Ok(v) if v == 0 => malloc(layout.size()) as *mut u8,
+            //Ok(v) => panic!("Unexpected allocation for size {}, protection level: {}", layout.size(), v),
{}", layout.size(), v), + Ok(v) => { + // Tripped a bad allocation, but make sure further allocation/deallocation during unwind + // doesn't have issues + PROTECTION_LEVEL.with(|v| *v.write().unwrap() = 0); + panic!("Unexpected allocation for size {}, protection level: {}", layout.size(), v) + } + Err(_) => { + // It shouldn't be possible to reach this point... + panic!("Unexpected error for fetching protection level") + } + } } - unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) { - if *DO_PANIC.read() && !*INTERNAL_ALLOCATION.read() { - panic!("Unexpected drop") - } else if log_enabled!(*LOG_LEVEL.read()) { - // We wrap in a block because we need to release the write guard - // so allocations during `Backtrace::new()` can read - { *INTERNAL_ALLOCATION.write() = true; } - - let bt = Backtrace::new(); - log!(*LOG_LEVEL.read(), "Unexpected drop:\n{:?}", bt); - - *INTERNAL_ALLOCATION.write() = false; + unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { + if thread::panicking() || THREAD_LOCAL_LOCK.try_lock().is_none() { + return free(ptr as *mut c_void); + } + + let protection_level: Result = { + let _lock = THREAD_LOCAL_LOCK.lock(); + PROTECTION_LEVEL.try_with(|v| *v.read().unwrap()) + .or(Ok(0)) + }; + + free(ptr as *mut c_void); + match protection_level { + Ok(v) if v > 0 => { + // Tripped a bad dealloc, but make sure further memory access during unwind + // doesn't have issues + PROTECTION_LEVEL.with(|v| *v.write().unwrap() = 0); + panic!("Unexpected deallocation for size {}, protection level: {}", layout.size(), v) + }, + _ => () } - free(ptr as *mut c_void) } } diff --git a/tests/allocations.rs b/tests/allocations.rs index 327444d..44a17dd 100644 --- a/tests/allocations.rs +++ b/tests/allocations.rs @@ -2,7 +2,8 @@ extern crate qadapt; use qadapt::QADAPT; -use qadapt::set_panic; +use qadapt::enter_protected; +use qadapt::exit_protected; #[global_allocator] static Q: QADAPT = QADAPT; @@ -15,17 +16,17 @@ pub fn black_box(dummy: T) -> T { #[test] fn test_copy() { - set_panic(true); + enter_protected(); black_box(0u8); - set_panic(false); + exit_protected(); } #[test] #[should_panic] fn test_allocate() { - set_panic(true); + enter_protected(); let _x = Box::new(12); - set_panic(false); + exit_protected(); } fn unit_result(b: bool) -> Result<(), ()> { @@ -38,51 +39,51 @@ fn unit_result(b: bool) -> Result<(), ()> { #[test] fn test_unit_result() { - set_panic(true); + enter_protected(); #[allow(unused)] { black_box(unit_result(true)); } black_box(unit_result(true)).unwrap(); #[allow(unused)] { black_box(unit_result(false)); } black_box(unit_result(false)).unwrap_err(); - set_panic(false); + exit_protected(); } #[test] #[should_panic] fn test_vec_push() { let mut v = Vec::new(); - set_panic(true); + enter_protected(); v.push(0); } #[test] fn test_vec_push_capacity() { let mut v = Vec::with_capacity(1); - set_panic(true); + enter_protected(); v.push(0); v.pop(); v.push(0); - set_panic(false); + exit_protected(); } #[test] fn test_vec_with_zero() { - set_panic(true); + enter_protected(); let _v: Vec = black_box(Vec::with_capacity(0)); - set_panic(false); + exit_protected(); } #[test] fn test_vec_new() { - set_panic(true); + enter_protected(); let _v: Vec = black_box(Vec::new()); - set_panic(false); + exit_protected(); } #[test] #[should_panic] fn test_vec_with_one() { - set_panic(true); + enter_protected(); let _v: Vec = Vec::with_capacity(1); } \ No newline at end of file