no_std for the time being

Impossible to maintain long-term because of the backtrace crate and thread IDs, both of which require std
pull/3/head
Bradlee Speice 2018-09-22 17:26:52 -04:00
parent aafec768ca
commit 5755eccae7
4 changed files with 52 additions and 32 deletions

Cargo.toml

@@ -5,9 +5,5 @@ authors = ["Bradlee Speice <bradlee@speice.io>"]
 description = "The Quick And Dirty Allocation Profiling Tool"
 
 [dependencies]
-backtrace = "0.3"
-
-[dependencies.libc]
-default-features = false
-features = []
-version = "0.2"
+spin = "0.4"
+libc = { version = "0.2", default-features = false }
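
Why the swap: backtrace drops out because it (and the thread IDs the tool ultimately wants) requires std, which is why the commit message calls no_std temporary; spin comes in because it offers a no_std-compatible Once for the lazy initialization used in src/lib.rs below, and turning off libc's default features keeps libc itself free of std. A minimal sketch of the spin::Once pattern the allocator now leans on (the static name and the stored u32 are made up for illustration):

    extern crate spin;

    // One-time lazy initialization without std::sync::Once.
    static VALUE: spin::Once<u32> = spin::Once::new();

    fn get() -> u32 {
        // The closure runs exactly once, on first call; every later
        // call returns the value initialized by that first run.
        *VALUE.call_once(|| 42)
    }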

src/lib.rs

@@ -1,41 +1,42 @@
-extern crate backtrace;
-extern crate libc;
+#![no_std]
+
+extern crate libc;
+extern crate spin;
 
-use backtrace::Backtrace;
-
 use libc::c_void;
 use libc::free;
 use libc::malloc;
-use std::alloc::Layout;
-use std::alloc::GlobalAlloc;
-use std::sync::atomic::AtomicBool;
-use std::sync::atomic::Ordering;
+use core::alloc::Layout;
+use core::alloc::GlobalAlloc;
+use core::sync::atomic::AtomicBool;
+use core::sync::atomic::Ordering;
 
 mod const_init;
 use const_init::ConstInit;
 
-static mut INIT_ALLOCATIONS: u32 = 576;
+static INTERNAL_ALLOCATION: AtomicBool = AtomicBool::new(false);
+
+pub struct QADAPTInternal {
+    pub has_allocated: AtomicBool
+}
 
 pub struct QADAPT {
-    pub has_allocated: AtomicBool
+    internal: spin::Once<QADAPTInternal>
 }
 
 impl ConstInit for QADAPT {
     const INIT: QADAPT = QADAPT {
-        has_allocated: AtomicBool::new(false)
+        internal: spin::Once::new()
     };
 }
 
 unsafe impl GlobalAlloc for QADAPT {
     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
-        let block = malloc(layout.size()) as *mut u8;
-        if INIT_ALLOCATIONS > 0 {
-            INIT_ALLOCATIONS -= 1;
-        } else {
-            self.has_allocated.store(true, Ordering::SeqCst);
+        if !INTERNAL_ALLOCATION.load(Ordering::SeqCst) {
+            self.internal().has_allocated.store(true, Ordering::SeqCst);
         }
 
-        block
+        malloc(layout.size()) as *mut u8
     }
 
     unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) {
@@ -46,7 +47,24 @@ unsafe impl GlobalAlloc for QADAPT {
 impl QADAPT {
     pub const INIT: Self = <Self as ConstInit>::INIT;
 
+    fn internal(&self) -> &QADAPTInternal {
+        self.internal.call_once(|| {
+            INTERNAL_ALLOCATION.store(true, Ordering::SeqCst);
+
+            let q = QADAPTInternal {
+                has_allocated: AtomicBool::new(false)
+            };
+
+            INTERNAL_ALLOCATION.store(false, Ordering::SeqCst);
+            q
+        })
+    }
+
     pub fn clear_allocations(&self) {
-        self.has_allocated.store(false, Ordering::Release)
+        self.internal().has_allocated.store(false, Ordering::SeqCst);
     }
+
+    pub fn has_allocated(&self) -> bool {
+        self.internal().has_allocated.load(Ordering::SeqCst)
+    }
 }
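
For orientation, the tests below consume this API roughly as follows. This is a sketch, not code from the commit: the qadapt crate name and the #[global_allocator] registration are assumptions, since neither appears in this diff.

    extern crate qadapt;
    use qadapt::QADAPT;

    // Assumed registration: GlobalAlloc implementors are installed
    // with #[global_allocator]; the diff itself never shows this line.
    #[global_allocator]
    static A: QADAPT = QADAPT::INIT;

    fn main() {
        // Pre-main allocations may have set the flag already, so reset it.
        A.clear_allocations();
        assert!(!A.has_allocated());

        // A heap allocation routes through QADAPT::alloc and sets the flag.
        let _b = Box::new(42);
        assert!(A.has_allocated());
    }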


@@ -25,20 +25,20 @@ struct NonEmpty {
 #[test]
 fn allocation_flag() {
     A.clear_allocations();
-    assert!(!A.has_allocated.load(Ordering::SeqCst));
+    assert!(!A.has_allocated());
 
     let _x = 24;
-    assert!(!A.has_allocated.load(Ordering::SeqCst));
+    assert!(!A.has_allocated());
 
     let _x = Empty {};
-    assert!(!A.has_allocated.load(Ordering::SeqCst));
+    assert!(!A.has_allocated());
 
     let _x = NonEmpty {
         _x: 42,
         _y: 84
     };
-    assert!(!A.has_allocated.load(Ordering::SeqCst));
+    assert!(!A.has_allocated());
 
     let _x = Box::new(42);
-    assert!(A.has_allocated.load(Ordering::SeqCst));
+    assert!(A.has_allocated());
 }


@@ -8,7 +8,13 @@ static A: QADAPT = QADAPT::INIT;
 #[test]
 fn init() {
-    // Make sure that we don't have any allocations at the start
-    // that pollute other tests
-    assert!(!A.has_allocated.load(Ordering::SeqCst));
+    // Because the allocator and its internals aren't the only
+    // "pre-main" allocations that happen, at startup we expect
+    // to see that A has in fact already allocated
+    assert!(A.has_allocated());
+
+    A.clear_allocations();
+    assert!(!A.has_allocated());
+
     let _x = Box::new(42);
     assert!(A.has_allocated());
 }