Mirror of https://github.com/bspeice/qadapt
commit 03310c6372
parent a7c7571b49

Much simpler take, and actually works this time.
Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "qadapt"
-version = "0.2.1"
+version = "0.3.0"
 authors = ["Bradlee Speice <bradlee@speice.io>"]
 description = "The Quick And Dirty Allocation Profiling Tool"
 license = "Apache-2.0"
@@ -9,5 +9,7 @@ categories = ["allocator", "nostd"]
 repository = "https://github.com/bspeice/qadapt.git"
 
 [dependencies]
-libc = { version = "0.2", default-features = false }
+backtrace = "0.3"
+libc = "0.2"
+log = "0.4"
 spin = "0.4"
README.md (14 lines changed)
@@ -1,13 +1,7 @@
 The Quick And Dirty Allocation Profiling Tool
 =============================================
 
-A simple attempt at a `#[no_std]` compatible allocator that can track
-allocations on a per-thread basis, for the purpose of guaranteeing that
-performance-critical code doesn't trigger an allocation while running.
-
-Nightly Rust is required because of a dependence on the `alloc` crate.
-
-The current state has all the infrastructure in place, but the tests are a bit
-flaky. As such, this crate likely won't see much further development; if you
-are interested in claiming the qadapt name, please reach out to the author
-at [bradlee@speice.io](mailto:bradlee@speice.io).
+A simple attempt at an allocator that can let you know if allocations
+are happening in places you didn't intend. This is primarily used for
+guaranteeing that performance-critical code doesn't trigger an allocation
+while running.
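As a rough sketch of the usage the new README describes (not part of the commit itself, and assuming only the `QADAPT` allocator and `set_panic` function added to src/lib.rs below), a consumer crate might look like this:

    extern crate qadapt;

    use qadapt::{set_panic, QADAPT};

    // Route every heap allocation in this binary through qadapt.
    #[global_allocator]
    static Q: QADAPT = QADAPT;

    fn main() {
        set_panic(true);            // allocations are now treated as bugs
        let sum = 2u64 + 2;         // stack-only work passes through untouched
        set_panic(false);           // leave the guarded region
        let _boxed = Box::new(sum); // heap allocation is allowed again here
    }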
src/const_init.rs (deleted)
@@ -1,5 +0,0 @@
-/// Anything that can be initialized with a `const` value.
-pub(crate) trait ConstInit {
-    /// The `const` default initializer value for `Self`.
-    const INIT: Self;
-}
src/lib.rs (151 lines changed)
@@ -1,137 +1,68 @@
-#![no_std]
-#![feature(alloc)]
-
-extern crate alloc;
+extern crate backtrace;
 extern crate libc;
+#[macro_use]
+extern crate log;
 extern crate spin;
 
-use alloc::collections::btree_map::BTreeMap;
+use backtrace::Backtrace;
 use libc::c_void;
 use libc::free;
 use libc::malloc;
-use core::alloc::Layout;
-use core::alloc::GlobalAlloc;
-use core::sync::atomic::AtomicBool;
-use core::sync::atomic::Ordering;
+use log::Level;
+use std::alloc::Layout;
+use std::alloc::GlobalAlloc;
 use spin::RwLock;
 
-mod const_init;
-use const_init::ConstInit;
+static DO_PANIC: RwLock<bool> = RwLock::new(false);
+static INTERNAL_ALLOCATION: RwLock<bool> = RwLock::new(false);
+static LOG_LEVEL: RwLock<Level> = RwLock::new(Level::Debug);
 
-mod thread_id;
+pub struct QADAPT;
 
-// TODO: Doesn't check for race conditions
-static INTERNAL_ALLOCATION: AtomicBool = AtomicBool::new(false);
-
-pub struct QADAPTInternal {
-    pub thread_has_allocated: BTreeMap<usize, AtomicBool>,
-    pub recording_enabled: BTreeMap<usize, AtomicBool>
+pub fn set_panic(b: bool) {
+    *DO_PANIC.write() = b;
 }
 
-pub struct QADAPT {
-    internal: spin::Once<RwLock<QADAPTInternal>>
+pub fn set_log_level(level: Level) {
+    *LOG_LEVEL.write() = level;
 }
 
-impl ConstInit for QADAPT {
-    const INIT: QADAPT = QADAPT {
-        internal: spin::Once::new()
-    };
-}
-
 
 unsafe impl GlobalAlloc for QADAPT {
     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
-        if !INTERNAL_ALLOCATION.load(Ordering::SeqCst) {
-            let tid = thread_id::get();
+        // Use a block to release the read guard
+        let should_panic = { *DO_PANIC.read() };
 
-            // Need to use RAII guard because record_allocation() needs write access
-            let should_record = {
-                let internal = self.internal().read();
-                internal.recording_enabled.contains_key(&tid)
-                    && internal.recording_enabled.get(&tid).unwrap().load(Ordering::SeqCst)
-            };
+        if should_panic && !*INTERNAL_ALLOCATION.read() {
+            // Only trip one panic at a time, don't want to cause issues on potential rewind
+            *DO_PANIC.write() = false;
+            panic!("Unexpected allocation")
+        } else if log_enabled!(*LOG_LEVEL.read()) {
+            // We wrap in a block because we need to release the write guard
+            // so allocations during `Backtrace::new()` can read
+            { *INTERNAL_ALLOCATION.write() = true; }
 
-            if should_record {
-                self.record_allocation(thread_id::get())
-            }
+            let bt = Backtrace::new();
+            log!(*LOG_LEVEL.read(), "Unexpected allocation:\n{:?}", bt);
+
+            *INTERNAL_ALLOCATION.write() = false;
         }
 
         malloc(layout.size()) as *mut u8
     }
 
     unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) {
+        if *DO_PANIC.read() && !*INTERNAL_ALLOCATION.read() {
+            panic!("Unexpected drop")
+        } else if log_enabled!(*LOG_LEVEL.read()) {
+            // We wrap in a block because we need to release the write guard
+            // so allocations during `Backtrace::new()` can read
+            { *INTERNAL_ALLOCATION.write() = true; }
+
+            let bt = Backtrace::new();
+            log!(*LOG_LEVEL.read(), "Unexpected drop:\n{:?}", bt);
+
+            *INTERNAL_ALLOCATION.write() = false;
+        }
         free(ptr as *mut c_void)
     }
 }
-
-impl QADAPT {
-    pub const INIT: Self = <Self as ConstInit>::INIT;
-
-    fn internal(&self) -> &RwLock<QADAPTInternal> {
-        self.internal.call_once(|| {
-            INTERNAL_ALLOCATION.store(true, Ordering::SeqCst);
-            let q = QADAPTInternal {
-                thread_has_allocated: BTreeMap::new(),
-                recording_enabled: BTreeMap::new()
-            };
-            INTERNAL_ALLOCATION.store(false, Ordering::SeqCst);
-
-            RwLock::new(q)
-        })
-    }
-
-    pub fn reset_allocation_state(&self) {
-        let internal = self.internal().write();
-
-        for (_tid, has_allocated) in &internal.thread_has_allocated {
-            has_allocated.store(false, Ordering::SeqCst);
-        }
-        for (_tid, recording_enabled) in &internal.recording_enabled {
-            recording_enabled.store(false, Ordering::SeqCst);
-        }
-    }
-
-    pub fn has_allocated_current(&self) -> bool {
-        let tid = thread_id::get();
-        let internal = self.internal().read();
-
-        // UNWRAP: Already checked for existence
-        internal.thread_has_allocated.contains_key(&tid)
-            && internal.thread_has_allocated.get(&tid).unwrap().load(Ordering::SeqCst)
-    }
-
-    pub fn record_allocation(&self, thread_id: usize) {
-        let mut internal = self.internal().write();
-        if internal.thread_has_allocated.contains_key(&thread_id) {
-            // UNWRAP: Already checked for existence
-            internal.thread_has_allocated.get(&thread_id)
-                .unwrap().store(true, Ordering::SeqCst)
-        }
-        else {
-            INTERNAL_ALLOCATION.store(true, Ordering::SeqCst);
-            internal.thread_has_allocated.insert(thread_id, AtomicBool::new(true));
-            INTERNAL_ALLOCATION.store(false, Ordering::SeqCst);
-        }
-    }
-
-    pub fn enable_recording_current(&self) {
-        self.enable_recording(thread_id::get());
-    }
-
-    pub fn enable_recording(&self, tid: usize) {
-        let mut internal = self.internal().write();
-
-        if internal.recording_enabled.contains_key(&tid) {
-            // UNWRAP: Already checked for existence
-            internal.recording_enabled.get(&tid).unwrap().store(true, Ordering::SeqCst);
-        }
-        else {
-            INTERNAL_ALLOCATION.store(true, Ordering::SeqCst);
-            internal.recording_enabled.insert(tid, AtomicBool::new(true));
-            INTERNAL_ALLOCATION.store(false, Ordering::SeqCst);
-        }
-    }
-}
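The comments in `alloc` and `dealloc` above point at the one subtle piece of this design: capturing a `Backtrace` or building a log line allocates, so the allocator marks that work as internal before doing it, otherwise it would recurse into itself. A minimal, qadapt-independent sketch of that guard pattern (names here are illustrative, not from the crate) is:

    use std::sync::atomic::{AtomicBool, Ordering};

    // Set while the hook below is doing its own allocations.
    static INTERNAL: AtomicBool = AtomicBool::new(false);

    fn on_unexpected_allocation() {
        // Already inside the hook: bail out instead of recursing.
        if INTERNAL.swap(true, Ordering::SeqCst) {
            return;
        }
        // Safe to allocate here (capture a backtrace, format a report, ...).
        let _report = String::from("unexpected allocation");
        INTERNAL.store(false, Ordering::SeqCst);
    }

    fn main() {
        on_unexpected_allocation();
    }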
src/thread_id.rs (deleted)
@@ -1,6 +0,0 @@
-/// Taken from https://crates.io/crates/thread-id and re-purposed to be no-std safe
-use libc;
-
-pub fn get() -> usize {
-    unsafe { libc::pthread_self() as usize }
-}
(deleted tests file)
@@ -1,81 +0,0 @@
-extern crate qadapt;
-
-use qadapt::QADAPT;
-use std::alloc::alloc;
-use std::alloc::Layout;
-
-#[global_allocator]
-static A: QADAPT = QADAPT::INIT;
-
-#[test]
-fn alloc_nonnull() {
-    unsafe {
-        assert!(!alloc(Layout::new::<u32>()).is_null())
-    }
-}
-
-struct Empty;
-
-struct NonEmpty {
-    _x: i32,
-    _y: i32
-}
-
-#[test]
-fn allocation_flag() {
-    A.reset_allocation_state();
-    A.enable_recording_current();
-    assert!(!A.has_allocated_current());
-
-    let _x = 24;
-    assert!(!A.has_allocated_current());
-
-    let _x = Empty {};
-    assert!(!A.has_allocated_current());
-
-    let _x = NonEmpty {
-        _x: 42,
-        _y: 84
-    };
-    assert!(!A.has_allocated_current());
-
-    let _x = Box::new(42);
-    assert!(A.has_allocated_current());
-}
-
-#[inline(never)]
-fn no_op() {}
-
-#[test]
-fn no_alloc_during_noop() {
-    A.reset_allocation_state();
-    A.enable_recording_current();
-    assert!(!A.has_allocated_current());
-
-    no_op();
-    assert!(!A.has_allocated_current());
-}
-
-#[inline(never)]
-fn allocates() {
-    let _x = Box::new(42);
-}
-
-#[test]
-fn alloc_during_func_call() {
-    A.reset_allocation_state();
-    A.enable_recording_current();
-    assert!(!A.has_allocated_current());
-
-    allocates();
-    assert!(A.has_allocated_current());
-}
-
-#[test]
-fn allocates_unrecorded() {
-    A.reset_allocation_state();
-    assert!(!A.has_allocated_current());
-
-    allocates();
-    assert!(!A.has_allocated_current());
-}
(deleted tests file)
@@ -1,18 +0,0 @@
-extern crate qadapt;
-
-use qadapt::QADAPT;
-
-#[global_allocator]
-static A: QADAPT = QADAPT::INIT;
-
-#[test]
-fn init() {
-    assert!(!A.has_allocated_current());
-    A.reset_allocation_state();
-    A.enable_recording_current();
-
-    assert!(!A.has_allocated_current());
-
-    let _x = Box::new(42);
-    assert!(A.has_allocated_current());
-}
tests/simple.rs (new file, 30 lines)
@@ -0,0 +1,30 @@
+#![feature(asm)]
+
+extern crate qadapt;
+
+use qadapt::QADAPT;
+use qadapt::set_panic;
+
+#[global_allocator]
+static Q: QADAPT = QADAPT;
+
+pub fn black_box<T>(dummy: T) -> T {
+    // Taken from test lib, need to mark the arg as non-introspectable
+    unsafe {asm!("" : : "r"(&dummy))}
+    dummy
+}
+
+#[test]
+fn test_copy() {
+    set_panic(true);
+    black_box(0u8);
+    set_panic(false);
+}
+
+#[test]
+#[should_panic]
+fn test_allocate() {
+    set_panic(true);
+    let _x = Box::new(12);
+    set_panic(false);
+}