use std::sync::Arc;
use crate::{Backtrace, BacktraceHash, CountAndSize};
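
/// A hash of a pointer address, used as the key when tracking individual allocations.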
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub struct PtrHash(u64);
impl nohash_hasher::IsEnabled for PtrHash {}
impl PtrHash {
#[inline]
pub fn new(ptr: *mut u8) -> Self {
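        // Fixed seeds make the hash deterministic, so a given pointer always
        // hashes (and therefore samples) the same way.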
let hash = ahash::RandomState::with_seeds(1, 2, 3, 4).hash_one(ptr);
Self(hash)
}
}
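
/// A formatted backtrace, wrapped in an [`Arc`] so that clones are cheap.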
#[derive(Clone)]
pub struct ReadableBacktrace {
readable: Arc<str>,
}
impl std::fmt::Display for ReadableBacktrace {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.readable.fmt(f)
}
}
impl ReadableBacktrace {
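    /// Resolve symbols and format the backtrace.
    ///
    /// Comparatively expensive, so this is only done once per unique callstack.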
fn new(mut backtrace: Backtrace) -> Self {
Self {
readable: backtrace.format(),
}
}
}
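
/// A readable callstack together with statistics about the live allocations made from it.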
#[derive(Clone)]
pub struct CallstackStatistics {
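    /// Human-readable backtrace, for display.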
pub readable_backtrace: ReadableBacktrace,
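
    /// The sampling rate that produced these statistics.
    ///
    /// A `stochastic_rate` of 8 means that only roughly one in eight
    /// allocations was tracked.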
pub stochastic_rate: usize,
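
    /// Count and total size of the live (not yet freed) sampled allocations
    /// at this callstack.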
pub extant: CountAndSize,
}
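
/// Tracks live allocations per callstack, stochastically sampling only a
/// subset of all allocations to keep the overhead low.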
pub struct AllocationTracker {
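    /// Must be a non-zero power of two: we track roughly one in every
    /// `stochastic_rate` allocations.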
stochastic_rate: usize,
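
    /// Formatted backtraces, keyed by callstack hash.
    /// Entries are never removed, so this grows with the number of unique callstacks.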
readable_backtraces: nohash_hasher::IntMap<BacktraceHash, ReadableBacktrace>,
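
    /// Maps each sampled live allocation to the callstack that produced it.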
live_allocs: ahash::HashMap<PtrHash, BacktraceHash>,
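
    /// Current count and total size of the sampled live allocations, per callstack.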
callstack_stats: nohash_hasher::IntMap<BacktraceHash, CountAndSize>,
}
impl AllocationTracker {
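    /// Create a tracker that samples roughly one in every `stochastic_rate` allocations.
    ///
    /// `stochastic_rate` must be a non-zero power of two.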
pub fn with_stochastic_rate(stochastic_rate: usize) -> Self {
        // `is_power_of_two` is false for zero, so this also rejects a rate of 0.
        assert!(
            stochastic_rate.is_power_of_two(),
            "stochastic_rate must be a non-zero power of two"
        );
Self {
stochastic_rate,
readable_backtraces: Default::default(),
live_allocs: Default::default(),
callstack_stats: Default::default(),
}
}
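
    /// Since `stochastic_rate` is a power of two, masking the low bits of the
    /// hash keeps roughly one in every `stochastic_rate` pointers.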
fn should_sample(&self, ptr: PtrHash) -> bool {
ptr.0 & (self.stochastic_rate as u64 - 1) == 0
}
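
    /// Track an allocation of `size` bytes at `ptr`, if it falls in the sampled subset.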
pub fn on_alloc(&mut self, ptr: PtrHash, size: usize) {
if !self.should_sample(ptr) {
return;
}
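        // Capturing the backtrace unresolved is cheap; symbols are only
        // resolved the first time a callstack is seen (in `ReadableBacktrace::new`).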
let unresolved_backtrace = Backtrace::new_unresolved();
let hash = BacktraceHash::new(&unresolved_backtrace);
self.readable_backtraces
.entry(hash)
.or_insert_with(|| ReadableBacktrace::new(unresolved_backtrace));
        self.callstack_stats.entry(hash).or_default().add(size);
self.live_allocs.insert(ptr, hash);
}
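
    /// Track a deallocation. `size` must match what was passed to [`Self::on_alloc`]
    /// for the same pointer.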
pub fn on_dealloc(&mut self, ptr: PtrHash, size: usize) {
if !self.should_sample(ptr) {
return;
}
if let Some(hash) = self.live_allocs.remove(&ptr) {
if let std::collections::hash_map::Entry::Occupied(mut entry) =
self.callstack_stats.entry(hash)
{
let stats = entry.get_mut();
stats.sub(size);
if stats.size == 0 {
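                    // Nothing left at this callstack: drop the stats entry.
                    // The formatted backtrace in `readable_backtraces` is kept.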
entry.remove();
}
}
}
}
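
    /// The `n` callstacks with the most live allocated bytes, largest first.
    ///
    /// Counts and sizes cover only the sampled allocations; multiply by
    /// `stochastic_rate` to estimate the true totals.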
pub fn top_callstacks(&self, n: usize) -> Vec<CallstackStatistics> {
let mut vec: Vec<_> = self
.callstack_stats
.iter()
.filter(|(_hash, c)| c.count > 0)
.filter_map(|(hash, c)| {
Some(CallstackStatistics {
readable_backtrace: self.readable_backtraces.get(hash)?.clone(),
stochastic_rate: self.stochastic_rate,
extant: *c,
})
})
.collect();
        vec.sort_by_key(|stats| std::cmp::Reverse(stats.extant.size));
vec.truncate(n);
vec.shrink_to_fit();
vec
}
}