#![deny(missing_docs)]
#![deny(rustdoc::missing_doc_code_examples)]
#![deny(missing_debug_implementations)]
use backtrace::SymbolName;
use lazy_static::lazy_static;
use mintex::Mutex;
use rustc_hash::FxHashMap;
use serde::Serialize;
use std::alloc::{GlobalAlloc, Layout, System};
use std::cell::Cell;
use std::fs::File;
use std::hash::{Hash, Hasher};
use std::io::BufWriter;
use std::ops::AddAssign;
use std::path::{Path, PathBuf};
use std::time::{Duration, Instant};
use thousands::Separable;
lazy_static! {
    // The single global profiler state, shared by `Profiler`, `Alloc`, the
    // stats getters, and the assertion machinery. `mintex::Mutex` is a
    // minimal mutex; presumably chosen because locking must not allocate
    // (we take it inside the global allocator) — TODO(review) confirm.
    static ref TRI_GLOBALS: Mutex<Phase<Globals>> = Mutex::new(Phase::Ready);
}
/// Lifecycle of the global profiler state.
#[derive(PartialEq)]
enum Phase<T> {
    /// No profiler is running (initial state; restored when a profiler is
    /// dropped).
    Ready,
    /// A profiler is running; `T` holds its state (`Globals`).
    Running(T),
    /// A `dhat::assert!`-style assertion failed: the data has been saved and
    /// no further profiling can occur.
    PostAssert,
}
/// Which end of a backtrace a known frame should be trimmed from (see
/// `Backtrace::get_frames_to_trim`).
#[derive(PartialEq)]
enum TB {
    Top,
    Bottom,
}
/// All state for a running profiler.
struct Globals {
    // Where the JSON output is written by `finish`.
    file_name: PathBuf,
    // When `true`, data is not saved when the profiler is dropped (see
    // `Profiler::drop_inner`); assertions via `check_assert_condition`
    // require this mode.
    testing: bool,
    // Maximum frames to keep per backtrace; `None` means no trimming.
    trim_backtraces: Option<usize>,
    // Also dump the JSON to stderr at the end (debugging aid).
    eprint_json: bool,
    // Untrimmed backtrace taken at profiler creation; diffed against the
    // first event's backtrace to find common scaffolding frames.
    start_bt: Backtrace,
    // ip -> trim direction, computed lazily by `new_backtrace!` on the
    // first profiled event.
    frames_to_trim: Option<FxHashMap<usize, TB>>,
    // When profiling began; times in the output are relative to this.
    start_instant: Instant,
    // One `PpInfo` per distinct program point (i.e. per distinct backtrace).
    pp_infos: Vec<PpInfo>,
    // Maps each distinct backtrace to its index in `pp_infos`.
    backtraces: FxHashMap<Backtrace, usize>,
    // Run-wide totals: blocks/bytes in heap mode, events/units in ad hoc
    // mode (see `update_counts_for_ad_hoc_event`).
    total_blocks: u64, total_bytes: u64,
    // `Some` iff heap profiling (as opposed to ad hoc profiling).
    heap: Option<HeapGlobals>,
}
/// Heap-profiling-specific global state.
struct HeapGlobals {
    // Currently live blocks, keyed by address.
    live_blocks: FxHashMap<usize, LiveBlock>,
    // Current heap size in blocks and bytes.
    curr_blocks: usize,
    curr_bytes: usize,
    // Heap size at the global peak ("t-gmax").
    max_blocks: usize,
    max_bytes: usize,
    // When the global peak occurred.
    tgmax_instant: Instant,
}
impl Globals {
    /// Creates the profiler state. `heap` is `Some` for heap profiling and
    /// `None` for ad hoc profiling.
    fn new(
        testing: bool,
        file_name: PathBuf,
        trim_backtraces: Option<usize>,
        eprint_json: bool,
        heap: Option<HeapGlobals>,
    ) -> Self {
        Self {
            testing,
            file_name,
            trim_backtraces,
            eprint_json,
            // Untrimmed baseline backtrace; `new_backtrace!` later diffs an
            // event backtrace against it to decide what to trim.
            start_bt: new_backtrace_inner(None, &FxHashMap::default()),
            frames_to_trim: None, // computed lazily by `new_backtrace!`
            start_instant: Instant::now(),
            pp_infos: Vec::default(),
            backtraces: FxHashMap::default(),
            total_blocks: 0,
            total_bytes: 0,
            heap,
        }
    }
    /// Returns the `pp_infos` index for the program point identified by
    /// `bt`, inserting a fresh `PpInfo` (built by `new`) on first sight.
    fn get_pp_info<F: FnOnce() -> PpInfo>(&mut self, bt: Backtrace, new: F) -> usize {
        // `pp_infos` is borrowed separately so the closure can push to it
        // while `backtraces` is mutably borrowed by `entry`.
        let pp_infos = &mut self.pp_infos;
        *self.backtraces.entry(bt).or_insert_with(|| {
            let pp_info_idx = pp_infos.len();
            pp_infos.push(new());
            pp_info_idx
        })
    }
    /// Registers a newly allocated live block at address `ptr`.
    ///
    /// Panics if a live block is already recorded at that address, or if
    /// called in ad hoc mode (`heap` is `None`).
    fn record_block(&mut self, ptr: *mut u8, pp_info_idx: usize, now: Instant) {
        let h = self.heap.as_mut().unwrap();
        let old = h.live_blocks.insert(
            ptr as usize,
            LiveBlock {
                pp_info_idx,
                allocation_instant: now,
            },
        );
        std::assert!(matches!(old, None));
    }
    /// Updates global and per-PP counters for an allocation of `size` bytes.
    ///
    /// `delta` is `Some` for a realloc of a known block (live-block count is
    /// unchanged; only the byte delta is applied), `None` for a fresh
    /// allocation. Panics in ad hoc mode.
    fn update_counts_for_alloc(
        &mut self,
        pp_info_idx: usize,
        size: usize,
        delta: Option<Delta>,
        now: Instant,
    ) {
        self.total_blocks += 1;
        self.total_bytes += size as u64;
        let h = self.heap.as_mut().unwrap();
        if let Some(delta) = delta {
            // realloc: NOTE(review) the `+= 0` is a no-op; the live-block
            // count does not change on a realloc.
            h.curr_blocks += 0; h.curr_bytes += delta;
        } else {
            h.curr_blocks += 1;
            h.curr_bytes += size;
        }
        // `>=` (not `>`) so a tie re-records the peak at the later instant.
        if h.curr_bytes >= h.max_bytes {
            h.max_blocks = h.curr_blocks;
            h.max_bytes = h.curr_bytes;
            h.tgmax_instant = now;
        }
        self.pp_infos[pp_info_idx].update_counts_for_alloc(size, delta);
    }
    /// Updates global and per-PP counters for a deallocation of `size`
    /// bytes; `alloc_duration` is how long the block was live. Panics in ad
    /// hoc mode. Callers invoke `check_for_global_peak` first, because this
    /// shrinks the heap.
    fn update_counts_for_dealloc(
        &mut self,
        pp_info_idx: usize,
        size: usize,
        alloc_duration: Duration,
    ) {
        let h = self.heap.as_mut().unwrap();
        h.curr_blocks -= 1;
        h.curr_bytes -= size;
        self.pp_infos[pp_info_idx].update_counts_for_dealloc(size, alloc_duration);
    }
    /// Updates global and per-PP counters for an ad hoc event of the given
    /// `weight`. Panics in heap mode.
    fn update_counts_for_ad_hoc_event(&mut self, pp_info_idx: usize, weight: usize) {
        std::assert!(self.heap.is_none());
        // The block/byte totals double as event/unit totals in ad hoc mode.
        self.total_blocks += 1;
        self.total_bytes += weight as u64;
        self.pp_infos[pp_info_idx].update_counts_for_ad_hoc_event(weight);
    }
    /// If the heap is currently at its recorded global peak, snapshots every
    /// PP's current counts as its "at t-gmax" counts. Called just before
    /// operations that may shrink the heap, and at `finish`, so per-PP
    /// numbers reflect the true global peak. Panics in ad hoc mode.
    fn check_for_global_peak(&mut self) {
        let h = self.heap.as_mut().unwrap();
        if h.curr_bytes == h.max_bytes {
            for pp_info in self.pp_infos.iter_mut() {
                let h = pp_info.heap.as_mut().unwrap();
                h.at_tgmax_blocks = h.curr_blocks;
                h.at_tgmax_bytes = h.curr_bytes;
            }
        }
    }
    /// Snapshots the global heap counters. Panics in ad hoc mode.
    fn get_heap_stats(&self) -> HeapStats {
        match &self.heap {
            Some(heap) => HeapStats {
                total_blocks: self.total_blocks,
                total_bytes: self.total_bytes,
                curr_blocks: heap.curr_blocks,
                curr_bytes: heap.curr_bytes,
                max_blocks: heap.max_blocks,
                max_bytes: heap.max_bytes,
            },
            None => panic!("dhat: getting heap stats while doing ad hoc profiling"),
        }
    }
    /// Snapshots the global ad hoc counters. Panics in heap mode.
    fn get_ad_hoc_stats(&self) -> AdHocStats {
        match self.heap {
            None => AdHocStats {
                total_events: self.total_blocks,
                total_units: self.total_bytes,
            },
            Some(_) => panic!("dhat: getting ad hoc stats while doing heap profiling"),
        }
    }
    /// Finalizes profiling: accounts for still-live blocks, resolves and
    /// trims backtraces, prints a summary to stderr, and writes the
    /// DHAT-format JSON to `file_name` (or into `memory_output` if given).
    fn finish(mut self, memory_output: Option<&mut String>) {
        let now = Instant::now();
        if self.heap.is_some() {
            // The final state may itself be the global peak.
            self.check_for_global_peak();
            // Blocks still live at shutdown get lifetimes ending now.
            let h = self.heap.as_ref().unwrap();
            for &LiveBlock {
                pp_info_idx,
                allocation_instant,
            } in h.live_blocks.values()
            {
                self.pp_infos[pp_info_idx]
                    .heap
                    .as_mut()
                    .unwrap()
                    .total_lifetimes_duration += now.duration_since(allocation_instant);
            }
        }
        // Frame table: each distinct frame string gets an index; index 0 is
        // reserved for the "[root]" pseudo-frame.
        let mut ftbl_indices: FxHashMap<String, usize> = FxHashMap::default();
        ftbl_indices.insert("[root]".to_string(), 0);
        let mut next_ftbl_idx = 1;
        let pps: Vec<_> = std::mem::take(&mut self.backtraces)
            .into_iter()
            .map(|(mut bt, pp_info_idx)| {
                // Symbol resolution is deferred until output time.
                bt.0.resolve();
                let first_symbol_to_show = if self.trim_backtraces.is_some() {
                    if self.heap.is_some() {
                        bt.first_heap_symbol_to_show()
                    } else {
                        bt.first_ad_hoc_symbol_to_show()
                    }
                } else {
                    0
                };
                // Convert the shown frame/symbol pairs to frame-table
                // indices; symbols before `first_symbol_to_show` are hidden.
                let mut fs = vec![];
                let mut i = 0;
                for frame in bt.0.frames().iter() {
                    for symbol in frame.symbols().iter() {
                        i += 1;
                        if (i - 1) < first_symbol_to_show {
                            continue;
                        }
                        let s = Backtrace::frame_to_string(frame, symbol);
                        let &mut ftbl_idx = ftbl_indices.entry(s).or_insert_with(|| {
                            next_ftbl_idx += 1;
                            next_ftbl_idx - 1
                        });
                        fs.push(ftbl_idx);
                    }
                }
                PpInfoJson::new(&self.pp_infos[pp_info_idx], fs)
            })
            .collect();
        // Invert the string->index map into the index->string table the
        // JSON format expects.
        let mut ftbl = vec![String::new(); ftbl_indices.len()];
        for (frame, ftbl_idx) in ftbl_indices.into_iter() {
            ftbl[ftbl_idx] = frame;
        }
        let h = self.heap.as_ref();
        let is_heap = h.is_some();
        // See `DhatJson` for what the terse field names mean.
        let json = DhatJson {
            dhatFileVersion: 2,
            mode: if is_heap { "rust-heap" } else { "rust-ad-hoc" },
            verb: "Allocated",
            bklt: is_heap,
            bkacc: false,
            // Unit names are only overridden in ad hoc mode; heap mode uses
            // the viewer's defaults (bytes/blocks).
            bu: if is_heap { None } else { Some("unit") },
            bsu: if is_heap { None } else { Some("units") },
            bksu: if is_heap { None } else { Some("events") },
            tu: "µs",
            Mtu: "s",
            tuth: if is_heap { Some(10) } else { None },
            cmd: std::env::args().collect::<Vec<_>>().join(" "),
            pid: std::process::id(),
            // Time of the global heap peak, relative to the start.
            tg: h.map(|h| {
                h.tgmax_instant
                    .saturating_duration_since(self.start_instant)
                    .as_micros()
            }),
            te: now.duration_since(self.start_instant).as_micros(),
            pps,
            ftbl,
        };
        // Summary to stderr; unit names fall back to bytes/blocks in heap
        // mode (where `bsu`/`bksu` are `None`).
        eprintln!(
            "dhat: Total: {} {} in {} {}",
            self.total_bytes.separate_with_commas(),
            json.bsu.unwrap_or("bytes"),
            self.total_blocks.separate_with_commas(),
            json.bksu.unwrap_or("blocks"),
        );
        if let Some(h) = &self.heap {
            eprintln!(
                "dhat: At t-gmax: {} bytes in {} blocks",
                h.max_bytes.separate_with_commas(),
                h.max_blocks.separate_with_commas(),
            );
            eprintln!(
                "dhat: At t-end: {} bytes in {} blocks",
                h.curr_bytes.separate_with_commas(),
                h.curr_blocks.separate_with_commas(),
            );
        }
        if let Some(memory_output) = memory_output {
            // Testing hook: capture the JSON in memory instead of a file.
            *memory_output = serde_json::to_string_pretty(&json).unwrap();
            eprintln!("dhat: The data has been saved to the memory buffer");
        } else {
            let write = || -> std::io::Result<()> {
                let buffered_file = BufWriter::new(File::create(&self.file_name)?);
                // Pretty-printing with an empty indent keeps one item per
                // line without bloating the file.
                let formatter = serde_json::ser::PrettyFormatter::with_indent(b"");
                let mut ser = serde_json::Serializer::with_formatter(buffered_file, formatter);
                json.serialize(&mut ser)?;
                Ok(())
            };
            // I/O errors are reported to stderr, not propagated: this can
            // run during drop.
            match write() {
                Ok(()) => eprintln!(
                    "dhat: The data has been saved to {}, and is viewable with dhat/dh_view.html",
                    self.file_name.to_string_lossy()
                ),
                Err(e) => eprintln!(
                    "dhat: error: Writing to {} failed: {}",
                    self.file_name.to_string_lossy(),
                    e
                ),
            }
        }
        if self.eprint_json {
            eprintln!(
                "dhat: json = `{}`",
                serde_json::to_string_pretty(&json).unwrap()
            );
        }
    }
}
impl HeapGlobals {
    /// Creates a fresh, empty set of heap counters; the global-peak
    /// timestamp starts at construction time.
    fn new() -> Self {
        let now = Instant::now();
        Self {
            tgmax_instant: now,
            live_blocks: FxHashMap::default(),
            curr_blocks: 0,
            curr_bytes: 0,
            max_blocks: 0,
            max_bytes: 0,
        }
    }
}
/// Per-program-point (i.e. per-backtrace) counters.
struct PpInfo {
    // Blocks allocated (heap mode) or events recorded (ad hoc mode).
    total_blocks: u64,
    // Bytes allocated (heap mode) or units recorded (ad hoc mode).
    total_bytes: u64,
    // `Some` iff heap profiling.
    heap: Option<HeapPpInfo>,
}
/// Heap-specific per-PP counters.
#[derive(Default)]
struct HeapPpInfo {
    // This PP's currently live blocks/bytes.
    curr_blocks: usize,
    curr_bytes: usize,
    // This PP's own peak.
    max_blocks: usize,
    max_bytes: usize,
    // This PP's counts at the *global* heap peak (t-gmax), snapshotted by
    // `Globals::check_for_global_peak`.
    at_tgmax_blocks: usize,
    at_tgmax_bytes: usize,
    // Sum of the lifetimes of this PP's blocks (freed blocks, plus blocks
    // still live at `finish`).
    total_lifetimes_duration: Duration,
}
impl PpInfo {
    /// Creates a `PpInfo` for heap profiling, with per-PP heap counters.
    fn new_heap() -> Self {
        Self {
            total_blocks: 0,
            total_bytes: 0,
            heap: Some(HeapPpInfo::default()),
        }
    }
    /// Creates a `PpInfo` for ad hoc profiling (no heap counters).
    fn new_ad_hoc() -> Self {
        Self {
            total_blocks: 0,
            total_bytes: 0,
            heap: None,
        }
    }
    /// Records an allocation of `size` bytes at this PP.
    ///
    /// `delta` is `Some` for a realloc of an existing block — the live-block
    /// count is unchanged and only the byte delta is applied — and `None`
    /// for a fresh allocation. Panics if this `PpInfo` is in ad hoc mode.
    fn update_counts_for_alloc(&mut self, size: usize, delta: Option<Delta>) {
        self.total_blocks += 1;
        self.total_bytes += size as u64;
        let h = self.heap.as_mut().unwrap();
        if let Some(delta) = delta {
            // realloc: block count unchanged. (The original also had a dead
            // `h.curr_blocks += 0;` no-op here, now removed.)
            h.curr_bytes += delta;
        } else {
            h.curr_blocks += 1;
            h.curr_bytes += size;
        }
        // `>=` (not `>`) so ties re-record the peak, matching
        // `Globals::update_counts_for_alloc`.
        if h.curr_bytes >= h.max_bytes {
            h.max_blocks = h.curr_blocks;
            h.max_bytes = h.curr_bytes;
        }
    }
    /// Records the deallocation of one of this PP's blocks; `alloc_duration`
    /// is how long the block was live. Panics in ad hoc mode.
    fn update_counts_for_dealloc(&mut self, size: usize, alloc_duration: Duration) {
        let h = self.heap.as_mut().unwrap();
        h.curr_blocks -= 1;
        h.curr_bytes -= size;
        h.total_lifetimes_duration += alloc_duration;
    }
    /// Records an ad hoc event of the given `weight` at this PP. Panics in
    /// heap mode.
    fn update_counts_for_ad_hoc_event(&mut self, weight: usize) {
        std::assert!(self.heap.is_none());
        // Blocks/bytes double as events/units in ad hoc mode.
        self.total_blocks += 1;
        self.total_bytes += weight as u64;
    }
}
/// What the profiler tracks for each live heap block: the program point
/// that allocated it, and when.
struct LiveBlock {
    pp_info_idx: usize,
    allocation_instant: Instant,
}
/// RAII guard that sets this thread's "ignore allocations" flag for its
/// lifetime, so the profiler's own allocations are not themselves profiled;
/// the previous flag value is restored on drop.
struct IgnoreAllocs {
    // `true` if the flag was already set when this guard was created, in
    // which case `drop` leaves it set.
    was_already_ignoring_allocs: bool,
}
// Per-thread flag checked by `Alloc`'s methods: when set, allocator calls
// bypass all profiling bookkeeping.
thread_local!(static IGNORE_ALLOCS: Cell<bool> = Cell::new(false));
impl IgnoreAllocs {
    /// Starts ignoring allocations on this thread, remembering whether they
    /// were already being ignored so `drop` can restore the previous state.
    fn new() -> Self {
        let previously_ignoring = IGNORE_ALLOCS.with(|flag| flag.replace(true));
        Self {
            was_already_ignoring_allocs: previously_ignoring,
        }
    }
}
impl Drop for IgnoreAllocs {
    /// Clears the thread-local flag again — but only if this guard was the
    /// one that set it (an outer guard keeps it set).
    fn drop(&mut self) {
        let this_guard_set_it = !self.was_already_ignoring_allocs;
        if this_guard_set_it {
            IGNORE_ALLOCS.with(|flag| flag.set(false));
        }
    }
}
/// A type whose existence represents an active profiling session.
///
/// The collected data is saved when the `Profiler` is dropped (unless it
/// was created in testing mode — see `ProfilerBuilder::testing`).
#[derive(Debug)]
pub struct Profiler;
impl Profiler {
    /// Creates a heap profiler with the default configuration; data is
    /// written to `dhat-heap.json` when the profiler is dropped.
    pub fn new_heap() -> Self {
        Self::builder().build()
    }
    /// Creates an ad hoc profiler with the default configuration; data is
    /// written to `dhat-ad-hoc.json` when the profiler is dropped.
    pub fn new_ad_hoc() -> Self {
        Self::builder().ad_hoc().build()
    }
    /// Creates a `ProfilerBuilder` with the default configuration: heap
    /// profiling, backtraces trimmed to 10 frames, default file name.
    pub fn builder() -> ProfilerBuilder {
        ProfilerBuilder {
            ad_hoc: false,
            testing: false,
            file_name: None,
            trim_backtraces: Some(10),
            eprint_json: false,
        }
    }
}
/// A builder for `Profiler`, for configurations beyond what
/// `Profiler::new_heap` and `Profiler::new_ad_hoc` provide.
#[derive(Debug)]
pub struct ProfilerBuilder {
    // Ad hoc profiling instead of the default heap profiling.
    ad_hoc: bool,
    // Testing mode: enables the `dhat::assert!` macros and suppresses
    // saving on drop.
    testing: bool,
    // Output path override; `None` means the mode-specific default.
    file_name: Option<PathBuf>,
    // Max frames kept per backtrace; `None` disables trimming.
    trim_backtraces: Option<usize>,
    // Also dump the JSON to stderr at the end (debugging aid).
    eprint_json: bool,
}
impl ProfilerBuilder {
    /// Requests ad hoc profiling instead of the default heap profiling.
    pub fn ad_hoc(mut self) -> Self {
        self.ad_hoc = true;
        self
    }
    /// Puts the profiler into testing mode, which enables the
    /// `dhat::assert!` family of macros. In testing mode the data is not
    /// saved when the profiler is dropped.
    pub fn testing(mut self) -> Self {
        self.testing = true;
        self
    }
    /// Sets the output file name, overriding the default
    /// (`dhat-heap.json` / `dhat-ad-hoc.json`).
    pub fn file_name<P: AsRef<Path>>(mut self, file_name: P) -> Self {
        self.file_name = Some(file_name.as_ref().to_path_buf());
        self
    }
    /// Sets the maximum number of frames kept per backtrace; `None`
    /// disables trimming entirely. Values below 4 are clamped up to 4.
    pub fn trim_backtraces(mut self, max_frames: Option<usize>) -> Self {
        self.trim_backtraces = max_frames.map(|m| std::cmp::max(m, 4));
        self
    }
    /// Also prints the JSON output to stderr at the end. For testing the
    /// profiler itself.
    #[doc(hidden)]
    pub fn eprint_json(mut self) -> Self {
        self.eprint_json = true;
        self
    }
    /// Creates a `Profiler` from the builder and starts profiling.
    ///
    /// # Panics
    ///
    /// Panics if another profiler is already running (or has asserted).
    pub fn build(self) -> Profiler {
        // Guard against our own `Alloc` re-entering the profiler while the
        // global state is being set up.
        let ignore_allocs = IgnoreAllocs::new();
        std::assert!(!ignore_allocs.was_already_ignoring_allocs);
        let phase: &mut Phase<Globals> = &mut TRI_GLOBALS.lock();
        match phase {
            Phase::Ready => {
                // Default file name depends on the profiling mode.
                let file_name = if let Some(file_name) = self.file_name {
                    file_name
                } else if !self.ad_hoc {
                    PathBuf::from("dhat-heap.json")
                } else {
                    PathBuf::from("dhat-ad-hoc.json")
                };
                let h = if !self.ad_hoc {
                    Some(HeapGlobals::new())
                } else {
                    None
                };
                *phase = Phase::Running(Globals::new(
                    self.testing,
                    file_name,
                    self.trim_backtraces,
                    self.eprint_json,
                    h,
                ));
            }
            Phase::Running(_) | Phase::PostAssert => {
                panic!("dhat: creating a profiler while a profiler is already running")
            }
        }
        Profiler
    }
}
// Builds a (possibly trimmed) backtrace for the current event. On the first
// event it computes `$g.frames_to_trim` by taking an untrimmed backtrace
// here and diffing it against the startup backtrace `$g.start_bt`; frames
// common to both can be skipped in all subsequent backtraces. A macro
// (rather than a function) presumably so capture starts at the caller's
// frame — TODO(review) confirm; note `new_backtrace_inner` is
// `#[inline(never)]`.
macro_rules! new_backtrace {
    ($g:expr) => {{
        if $g.frames_to_trim.is_none() {
            let bt = new_backtrace_inner(None, &FxHashMap::default());
            $g.frames_to_trim = Some(bt.get_frames_to_trim(&$g.start_bt));
        }
        new_backtrace_inner($g.trim_backtraces, $g.frames_to_trim.as_ref().unwrap())
    }};
}
/// Captures a backtrace, optionally trimming it.
///
/// When `trim_backtraces` is `Some`, frames whose instruction pointer is in
/// `frames_to_trim` are skipped (`TB::Top`) or end the walk (`TB::Bottom`),
/// and at most `max_frames` frames are kept.
#[inline(never)]
fn new_backtrace_inner(
    trim_backtraces: Option<usize>,
    frames_to_trim: &FxHashMap<usize, TB>,
) -> Backtrace {
    let mut frames = Vec::new();
    backtrace::trace(|frame| {
        if trim_backtraces.is_some() {
            match frames_to_trim.get(&(frame.ip() as usize)) {
                Some(TB::Top) => return true,     // skip this frame, keep walking
                Some(TB::Bottom) => return false, // stop the walk entirely
                None => {}
            }
        }
        frames.push(frame.clone().into());
        // Keep walking only while under the frame cap (if there is one).
        match trim_backtraces {
            Some(max_frames) => frames.len() < max_frames,
            None => true,
        }
    });
    Backtrace(frames.into())
}
/// A global allocator that wraps the system allocator and lets a running
/// `Profiler` track allocations; intended to be installed with
/// `#[global_allocator]`.
#[derive(Debug)]
pub struct Alloc;
unsafe impl GlobalAlloc for Alloc {
    /// Allocates via the system allocator and, when a heap profiler is
    /// running, records the new block against its allocating program point.
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        // Skip all bookkeeping for allocations made while the thread-local
        // flag is set (i.e. allocations made by the profiler itself).
        let ignore_allocs = IgnoreAllocs::new();
        if ignore_allocs.was_already_ignoring_allocs {
            System.alloc(layout)
        } else {
            let phase: &mut Phase<Globals> = &mut TRI_GLOBALS.lock();
            let ptr = System.alloc(layout);
            if ptr.is_null() {
                // Allocation failure: nothing to record.
                return ptr;
            }
            // Only record when a *heap* profiler is running.
            if let Phase::Running(g @ Globals { heap: Some(_), .. }) = phase {
                let size = layout.size();
                let bt = new_backtrace!(g);
                let pp_info_idx = g.get_pp_info(bt, PpInfo::new_heap);
                let now = Instant::now();
                g.record_block(ptr, pp_info_idx, now);
                g.update_counts_for_alloc(pp_info_idx, size, None, now);
            }
            ptr
        }
    }
    /// Reallocates via the system allocator, carrying the old block's
    /// bookkeeping over to the new address when a heap profiler is running.
    unsafe fn realloc(&self, old_ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        let ignore_allocs = IgnoreAllocs::new();
        if ignore_allocs.was_already_ignoring_allocs {
            System.realloc(old_ptr, layout, new_size)
        } else {
            let phase: &mut Phase<Globals> = &mut TRI_GLOBALS.lock();
            let new_ptr = System.realloc(old_ptr, layout, new_size);
            if new_ptr.is_null() {
                return new_ptr;
            }
            if let Phase::Running(g @ Globals { heap: Some(_), .. }) = phase {
                let old_size = layout.size();
                let delta = Delta::new(old_size, new_size);
                if delta.shrinking {
                    // The heap is about to shrink: snapshot per-PP stats if
                    // we are currently at the global peak.
                    g.check_for_global_peak();
                }
                let h = g.heap.as_mut().unwrap();
                let live_block = h.live_blocks.remove(&(old_ptr as usize));
                let (pp_info_idx, delta) = if let Some(live_block) = live_block {
                    // Known block: keep its PP and apply only the delta.
                    (live_block.pp_info_idx, Some(delta))
                } else {
                    // Unknown block — presumably allocated while bookkeeping
                    // was off; treat as a fresh allocation of `new_size`.
                    let bt = new_backtrace!(g);
                    let pp_info_idx = g.get_pp_info(bt, PpInfo::new_heap);
                    (pp_info_idx, None)
                };
                let now = Instant::now();
                g.record_block(new_ptr, pp_info_idx, now);
                g.update_counts_for_alloc(pp_info_idx, new_size, delta, now);
            }
            new_ptr
        }
    }
    /// Frees via the system allocator and, when a heap profiler is running,
    /// retires the block's bookkeeping and records its lifetime.
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        let ignore_allocs = IgnoreAllocs::new();
        if ignore_allocs.was_already_ignoring_allocs {
            System.dealloc(ptr, layout)
        } else {
            let phase: &mut Phase<Globals> = &mut TRI_GLOBALS.lock();
            System.dealloc(ptr, layout);
            if let Phase::Running(g @ Globals { heap: Some(_), .. }) = phase {
                let size = layout.size();
                let h = g.heap.as_mut().unwrap();
                // Blocks the profiler never saw allocated are ignored.
                if let Some(LiveBlock {
                    pp_info_idx,
                    allocation_instant,
                }) = h.live_blocks.remove(&(ptr as usize))
                {
                    // The heap is about to shrink: snapshot per-PP stats if
                    // we are currently at the global peak.
                    g.check_for_global_peak();
                    let alloc_duration = allocation_instant.elapsed();
                    g.update_counts_for_dealloc(pp_info_idx, size, alloc_duration);
                }
            }
        }
    }
}
/// Registers an event with the ad hoc profiler, with the given `weight`.
///
/// The event is attributed to the calling program point via its backtrace.
/// Does nothing unless an *ad hoc* profiler is running (heap mode and the
/// no-profiler case are silently ignored).
pub fn ad_hoc_event(weight: usize) {
    let ignore_allocs = IgnoreAllocs::new();
    std::assert!(!ignore_allocs.was_already_ignoring_allocs);
    let phase: &mut Phase<Globals> = &mut TRI_GLOBALS.lock();
    if let Phase::Running(g @ Globals { heap: None, .. }) = phase {
        let bt = new_backtrace!(g);
        let pp_info_idx = g.get_pp_info(bt, PpInfo::new_ad_hoc);
        g.update_counts_for_ad_hoc_event(pp_info_idx, weight);
    }
}
impl Profiler {
    /// Shuts the profiler down: returns the global phase to `Ready` and,
    /// unless in testing mode, saves the collected data (into
    /// `memory_output` if provided, otherwise to the output file).
    fn drop_inner(&mut self, memory_output: Option<&mut String>) {
        let ignore_allocs = IgnoreAllocs::new();
        std::assert!(!ignore_allocs.was_already_ignoring_allocs);
        let phase: &mut Phase<Globals> = &mut TRI_GLOBALS.lock();
        match std::mem::replace(phase, Phase::Ready) {
            // A `Profiler` only exists while the phase is `Running` or
            // `PostAssert`.
            Phase::Ready => unreachable!(),
            Phase::Running(g) => {
                if !g.testing {
                    g.finish(memory_output)
                }
            }
            // The data was already saved when the assertion fired.
            Phase::PostAssert => {}
        }
    }
    /// Shuts the profiler down as `drop` does, but captures the JSON output
    /// in the returned `String` instead of writing a file. For testing the
    /// profiler itself; the string is empty if nothing was saved.
    ///
    /// NOTE(review): after this runs, dropping the `Profiler` reaches
    /// `drop_inner` again with the phase already `Ready` and would hit
    /// `unreachable!` — callers presumably forget the profiler; confirm.
    #[doc(hidden)]
    pub fn drop_and_get_memory_output(&mut self) -> String {
        let mut memory_output = String::new();
        self.drop_inner(Some(&mut memory_output));
        memory_output
    }
}
impl Drop for Profiler {
    // Saving happens here (see `drop_inner`), so profiling covers the
    // `Profiler`'s whole lexical scope.
    fn drop(&mut self) {
        self.drop_inner(None);
    }
}
/// A wrapper around `backtrace::Backtrace` whose equality and hashing are
/// based solely on the frames' instruction pointers (see the `PartialEq`
/// and `Hash` impls), so unresolved backtraces can serve as map keys.
#[derive(Debug)]
struct Backtrace(backtrace::Backtrace);
impl Backtrace {
    /// Diffs `self` (an untrimmed backtrace taken at the first profiled
    /// event) against `start_bt` (taken at profiler creation) and returns a
    /// map from instruction pointer to trim direction: frames shared at the
    /// start of both backtraces map to `TB::Top`, frames shared at the end
    /// map to `TB::Bottom`.
    ///
    /// If a scan consumes either backtrace entirely, that scan's marks are
    /// discarded (the `retain` calls) rather than trimming everything.
    ///
    /// NOTE(review): the `len() - 1` expressions assume both backtraces are
    /// non-empty — confirm that holds for all callers.
    fn get_frames_to_trim(&self, start_bt: &Backtrace) -> FxHashMap<usize, TB> {
        let mut frames_to_trim = FxHashMap::default();
        let frames1 = self.0.frames();
        let frames2 = start_bt.0.frames();
        // Forward scan: common leading frames.
        let (mut i1, mut i2) = (0, 0);
        loop {
            if i1 == frames1.len() - 1 || i2 == frames2.len() - 1 {
                frames_to_trim.retain(|_, v| *v == TB::Bottom);
                break;
            }
            if frames1[i1].ip() != frames2[i2].ip() {
                break;
            }
            frames_to_trim.insert(frames1[i1].ip() as usize, TB::Top);
            i1 += 1;
            i2 += 1;
        }
        // Backward scan: common trailing frames.
        let (mut i1, mut i2) = (frames1.len() - 1, frames2.len() - 1);
        loop {
            if i1 == 0 || i2 == 0 {
                frames_to_trim.retain(|_, v| *v == TB::Top);
                break;
            }
            if frames1[i1].ip() != frames2[i2].ip() {
                break;
            }
            frames_to_trim.insert(frames1[i1].ip() as usize, TB::Bottom);
            i1 -= 1;
            i2 -= 1;
        }
        frames_to_trim
    }
    /// Index of the first symbol worth showing in heap mode: the last
    /// symbol in the flattened list (scanning from the end) whose demangled
    /// name looks allocator-related — presumably so allocator plumbing
    /// below the user's call is hidden. Returns 0 if none matches.
    fn first_heap_symbol_to_show(&self) -> usize {
        self.first_symbol_to_show(|s| {
            s.starts_with("alloc::alloc::")
                || s.starts_with("<alloc::alloc::")
                || s.starts_with("<dhat::Alloc")
                || s.starts_with("__rg_")
        })
    }
    /// Ad hoc mode has no allocator plumbing to hide: show everything.
    fn first_ad_hoc_symbol_to_show(&self) -> usize {
        0
    }
    /// Flattens all frames' symbols into one list and returns the index of
    /// the last symbol satisfying `p` (found by scanning from the end), or
    /// 0 if none does.
    fn first_symbol_to_show<P: Fn(&str) -> bool>(&self, p: P) -> usize {
        let symbols: Vec<_> = self
            .0
            .frames()
            .iter()
            .flat_map(|f| f.symbols().iter())
            .collect();
        for (i, symbol) in symbols.iter().enumerate().rev() {
            // `{:#}` is the `backtrace` crate's demangled-name form.
            if let Some(s) = symbol.name().map(|name| format!("{:#}", name)) {
                if p(&s) {
                    return i;
                }
            }
        }
        0
    }
    /// Prints the backtrace to stderr; debugging aid only.
    #[allow(dead_code)]
    fn eprint(&self) {
        for frame in self.0.frames().iter() {
            for symbol in frame.symbols().iter() {
                eprintln!("{}", Backtrace::frame_to_string(frame, symbol));
            }
        }
    }
    /// Formats a frame/symbol pair as `ip: name (file:line:col)`, using
    /// `???`/`0` placeholders for unknown parts and trimming the file path
    /// to its last few components (see `trim_path`).
    fn frame_to_string(
        frame: &backtrace::BacktraceFrame,
        symbol: &backtrace::BacktraceSymbol,
    ) -> String {
        format!(
            "{:?}: {:#} ({:#}:{}:{})",
            frame.ip(),
            symbol.name().unwrap_or_else(|| SymbolName::new(b"???")),
            match symbol.filename() {
                Some(path) => trim_path(path),
                None => Path::new("???"),
            }
            .display(),
            symbol.lineno().unwrap_or(0),
            symbol.colno().unwrap_or(0),
        )
    }
}
// Equality considers only the frames' instruction pointers, so it works on
// unresolved backtraces.
impl PartialEq for Backtrace {
    fn eq(&self, other: &Self) -> bool {
        let frames1 = self.0.frames();
        let frames2 = other.0.frames();
        frames1.len() == frames2.len()
            && frames1
                .iter()
                .zip(frames2.iter())
                .all(|(f1, f2)| f1.ip() == f2.ip())
    }
}
impl Eq for Backtrace {}
// Hashing must agree with `PartialEq`, so it too uses only the instruction
// pointers.
impl Hash for Backtrace {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.0
            .frames()
            .iter()
            .for_each(|frame| frame.ip().hash(state));
    }
}
/// Returns `path` with everything but its last `N` (three) components
/// removed; paths that short (or shorter) are returned unchanged.
fn trim_path(path: &Path) -> &Path {
    const N: usize = 3;
    let total = path.components().count();
    if total <= N {
        return path;
    }
    // Advance past all components before the final `N`, then re-slice.
    let mut tail = path.components();
    tail.nth(total - (N + 1));
    tail.as_path()
}
/// Stats from heap profiling, as measured at a point in time.
#[derive(Clone, Debug, PartialEq, Eq)]
#[non_exhaustive]
pub struct HeapStats {
    /// Number of blocks allocated over the whole run so far.
    pub total_blocks: u64,
    /// Number of bytes allocated over the whole run so far.
    pub total_bytes: u64,
    /// Number of blocks currently allocated.
    pub curr_blocks: usize,
    /// Number of bytes currently allocated.
    pub curr_bytes: usize,
    /// Number of blocks allocated at the global peak ("t-gmax").
    pub max_blocks: usize,
    /// Number of bytes allocated at the global peak ("t-gmax").
    pub max_bytes: usize,
}
/// Stats from ad hoc profiling, as measured at a point in time.
#[derive(Clone, Debug, PartialEq, Eq)]
#[non_exhaustive]
pub struct AdHocStats {
    /// Number of events recorded over the whole run so far.
    pub total_events: u64,
    /// Sum of the weights of all events recorded so far.
    pub total_units: u64,
}
impl HeapStats {
    /// Gets the current heap stats.
    ///
    /// # Panics
    ///
    /// Panics if no profiler is running, if an ad hoc (rather than heap)
    /// profiler is running, or after the profiler has asserted.
    pub fn get() -> Self {
        let ignore_allocs = IgnoreAllocs::new();
        std::assert!(!ignore_allocs.was_already_ignoring_allocs);
        let phase: &mut Phase<Globals> = &mut TRI_GLOBALS.lock();
        match phase {
            Phase::Ready => {
                panic!("dhat: getting heap stats when no profiler is running")
            }
            Phase::Running(g) => g.get_heap_stats(),
            Phase::PostAssert => {
                panic!("dhat: getting heap stats after the profiler has asserted")
            }
        }
    }
}
impl AdHocStats {
    /// Gets the current ad hoc stats.
    ///
    /// # Panics
    ///
    /// Panics if no profiler is running, if a heap (rather than ad hoc)
    /// profiler is running, or after the profiler has asserted.
    pub fn get() -> Self {
        let ignore_allocs = IgnoreAllocs::new();
        std::assert!(!ignore_allocs.was_already_ignoring_allocs);
        let phase: &mut Phase<Globals> = &mut TRI_GLOBALS.lock();
        match phase {
            Phase::Ready => {
                panic!("dhat: getting ad hoc stats when no profiler is running")
            }
            Phase::Running(g) => g.get_ad_hoc_stats(),
            Phase::PostAssert => {
                panic!("dhat: getting ad hoc stats after the profiler has asserted")
            }
        }
    }
}
/// Backend shared by the `dhat::assert!`-style macros: evaluates `cond` and
/// returns `true` iff the assertion FAILED (so the calling macro should
/// panic). On failure, the profile data is saved and the phase moves to
/// `PostAssert`.
///
/// # Panics
///
/// Panics if no profiler is running, if the profiler is not in testing
/// mode, or if a previous assertion already failed.
#[doc(hidden)]
pub fn check_assert_condition<F>(cond: F) -> bool
where
    F: FnOnce() -> bool,
{
    let ignore_allocs = IgnoreAllocs::new();
    std::assert!(!ignore_allocs.was_already_ignoring_allocs);
    let phase: &mut Phase<Globals> = &mut TRI_GLOBALS.lock();
    // First pass: validate the state and bail out early if the condition
    // holds; the phase is only replaced once a failure is certain.
    match phase {
        Phase::Ready => panic!("dhat: asserting when no profiler is running"),
        Phase::Running(g) => {
            if !g.testing {
                panic!("dhat: asserting while not in testing mode");
            }
            if cond() {
                return false;
            }
        }
        Phase::PostAssert => panic!("dhat: asserting after the profiler has asserted"),
    }
    // The assertion failed: save the data and move to `PostAssert`.
    match std::mem::replace(phase, Phase::PostAssert) {
        Phase::Ready => unreachable!(),
        Phase::Running(g) => {
            g.finish(None);
            true
        }
        Phase::PostAssert => unreachable!(),
    }
}
/// Asserts that a condition holds, like `std::assert!`; on failure the
/// profile data is saved and the profiler moves to its asserted state
/// before panicking.
///
/// Requires a `Profiler` created in testing mode (see
/// `check_assert_condition` for the exact panic conditions).
#[macro_export]
macro_rules! assert {
    ($cond:expr) => ({
        if dhat::check_assert_condition(|| $cond) {
            panic!("dhat: assertion failed: {}", stringify!($cond));
        }
    });
    ($cond:expr, $($arg:tt)+) => ({
        if dhat::check_assert_condition(|| $cond) {
            panic!("dhat: assertion failed: {}: {}", stringify!($cond), format_args!($($arg)+));
        }
    });
}
/// Asserts that two expressions are equal, like `std::assert_eq!`; on
/// failure the profile data is saved and the profiler moves to its
/// asserted state before panicking. Requires a `Profiler` created in
/// testing mode.
#[macro_export]
macro_rules! assert_eq {
    ($left:expr, $right:expr $(,)?) => ({
        if dhat::check_assert_condition( || $left == $right) {
            panic!(
                "dhat: assertion failed: `(left == right)`\n left: `{:?}`,\n right: `{:?}`",
                $left, $right
            );
        }
    });
    ($left:expr, $right:expr, $($arg:tt)+) => ({
        if dhat::check_assert_condition(|| $left == $right) {
            panic!(
                "dhat: assertion failed: `(left == right)`\n left: `{:?}`,\n right: `{:?}`: {}",
                $left, $right, format_args!($($arg)+)
            );
        }
    });
}
/// Asserts that two expressions are not equal, like `std::assert_ne!`; on
/// failure the profile data is saved and the profiler moves to its
/// asserted state before panicking. Requires a `Profiler` created in
/// testing mode.
#[macro_export]
macro_rules! assert_ne {
    // `$(,)?` allows an optional trailing comma, matching this file's
    // `assert_eq!` and the std macros (the original arm did not accept it).
    ($left:expr, $right:expr $(,)?) => ({
        if dhat::check_assert_condition(|| $left != $right) {
            panic!(
                "dhat: assertion failed: `(left != right)`\n left: `{:?}`,\n right: `{:?}`",
                $left, $right
            );
        }
    });
    ($left:expr, $right:expr, $($arg:tt)+) => ({
        if dhat::check_assert_condition(|| $left != $right) {
            panic!(
                "dhat: assertion failed: `(left != right)`\n left: `{:?}`,\n right: `{:?}`: {}",
                $left, $right, format_args!($($arg)+)
            );
        }
    });
}
/// Top-level object of dhat's JSON output format, as consumed by DHAT's
/// viewer (`dh_view.html`). Field names and abbreviations are dictated by
/// that format, hence the `non_snake_case` allowance; `Option` fields are
/// omitted from the output when `None`. See `Globals::finish` for the
/// values used.
#[derive(Serialize)]
#[allow(non_snake_case)]
struct DhatJson {
    dhatFileVersion: u32,
    // "rust-heap" or "rust-ad-hoc".
    mode: &'static str,
    verb: &'static str,
    // `true` iff heap profiling.
    bklt: bool,
    // Always `false` here.
    bkacc: bool,
    // Unit/event names; only present in ad hoc mode.
    #[serde(skip_serializing_if = "Option::is_none")]
    bu: Option<&'static str>,
    #[serde(skip_serializing_if = "Option::is_none")]
    bsu: Option<&'static str>,
    #[serde(skip_serializing_if = "Option::is_none")]
    bksu: Option<&'static str>,
    // Time units: "µs" and "s".
    tu: &'static str,
    Mtu: &'static str,
    // `Some(10)` in heap mode only.
    #[serde(skip_serializing_if = "Option::is_none")]
    tuth: Option<usize>,
    // The profiled process's command line and pid.
    cmd: String,
    pid: u32,
    // Microseconds from start to the global heap peak (heap mode only) and
    // from start to the end of profiling.
    #[serde(skip_serializing_if = "Option::is_none")]
    tg: Option<u128>,
    te: u128,
    // One entry per program point, plus the shared frame table their `fs`
    // indices point into ("[root]" is entry 0).
    pps: Vec<PpInfoJson>,
    ftbl: Vec<String>,
}
/// Per-program-point entry of the output's `pps` array. The `Option`
/// fields are heap-only measurements, omitted in ad hoc mode (see
/// `PpInfoJson::new` for the mapping from `PpInfo`).
#[derive(Serialize)]
struct PpInfoJson {
    // Total bytes (or ad hoc units).
    tb: u64,
    // Total blocks (or ad hoc events).
    tbk: u64,
    // Summed block lifetimes, in µs.
    #[serde(skip_serializing_if = "Option::is_none")]
    tl: Option<u128>,
    // Bytes/blocks at this PP's own peak.
    #[serde(skip_serializing_if = "Option::is_none")]
    mb: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none")]
    mbk: Option<usize>,
    // Bytes/blocks at the global heap peak (t-gmax).
    #[serde(skip_serializing_if = "Option::is_none")]
    gb: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none")]
    gbk: Option<usize>,
    // Bytes/blocks still live at the end.
    #[serde(skip_serializing_if = "Option::is_none")]
    eb: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none")]
    ebk: Option<usize>,
    // Indices into the top-level frame table, one per shown symbol.
    fs: Vec<usize>,
}
impl PpInfoJson {
    /// Converts a `PpInfo` into its JSON-file representation. The heap-only
    /// fields are `Some` exactly when the `PpInfo` carries heap counters,
    /// and are otherwise omitted from the serialized output.
    fn new(pp_info: &PpInfo, fs: Vec<usize>) -> Self {
        let h = pp_info.heap.as_ref();
        Self {
            tb: pp_info.total_bytes,
            tbk: pp_info.total_blocks,
            tl: h.map(|h| h.total_lifetimes_duration.as_micros()),
            mb: h.map(|h| h.max_bytes),
            mbk: h.map(|h| h.max_blocks),
            gb: h.map(|h| h.at_tgmax_bytes),
            gbk: h.map(|h| h.at_tgmax_blocks),
            eb: h.map(|h| h.curr_bytes),
            ebk: h.map(|h| h.curr_blocks),
            fs,
        }
    }
}
/// The size difference between an old allocation and its reallocation,
/// stored as a direction flag plus an unsigned magnitude (rather than a
/// signed number) so it can adjust unsigned counters exactly.
#[derive(Clone, Copy)]
struct Delta {
    shrinking: bool,
    size: usize,
}
impl Delta {
    /// Computes the delta taking a block from `old_size` to `new_size`.
    fn new(old_size: usize, new_size: usize) -> Delta {
        let shrinking = new_size < old_size;
        let size = if shrinking {
            old_size - new_size
        } else {
            new_size - old_size
        };
        Delta { shrinking, size }
    }
}
/// Applies a delta to a `usize` counter: grows or shrinks it by `size`.
impl AddAssign<Delta> for usize {
    fn add_assign(&mut self, rhs: Delta) {
        match rhs.shrinking {
            true => *self -= rhs.size,
            false => *self += rhs.size,
        }
    }
}
/// Applies a delta to a `u64` counter: grows or shrinks it by `size`.
impl AddAssign<Delta> for u64 {
    fn add_assign(&mut self, rhs: Delta) {
        let size = rhs.size as u64;
        match rhs.shrinking {
            true => *self -= size,
            false => *self += size,
        }
    }
}
/// Runs `f`, which must panic, and checks that the panic message equals
/// `expected`. The payload may be a `&str` (literal panics) or a `String`
/// (formatted panics); anything else, or no panic at all, is itself a
/// panic. For testing the profiler.
#[doc(hidden)]
pub fn assert_is_panic<R, F: FnOnce() -> R + std::panic::UnwindSafe>(f: F, expected: &str) {
    match std::panic::catch_unwind(f) {
        Ok(_) => panic!("assert_is_panic: Not an error"),
        Err(payload) => {
            let message = payload
                .downcast_ref::<&str>()
                .copied()
                .or_else(|| payload.downcast_ref::<String>().map(String::as_str));
            match message {
                Some(actual) => std::assert_eq!(expected, actual),
                None => panic!("assert_is_panic: Not a string: {:?}", payload),
            }
        }
    }
}
#[cfg(test)]
mod test {
    use super::trim_path;
    use std::path::Path;
    /// `trim_path` must leave paths of three or fewer components alone and
    /// keep only the last three components of anything longer.
    #[test]
    fn test_trim_path() {
        let cases: &[(&str, &str)] = &[
            ("", ""),
            ("/", "/"),
            ("aa.rs", "aa.rs"),
            ("/aa.rs", "/aa.rs"),
            ("bb/aa.rs", "bb/aa.rs"),
            ("/bb/aa.rs", "/bb/aa.rs"),
            ("cc/bb/aa.rs", "cc/bb/aa.rs"),
            ("/cc/bb/aa.rs", "cc/bb/aa.rs"),
            ("dd/cc/bb/aa.rs", "cc/bb/aa.rs"),
            ("/dd/cc/bb/aa.rs", "cc/bb/aa.rs"),
        ];
        for &(input, expected) in cases {
            std::assert_eq!(trim_path(Path::new(input)), Path::new(expected));
        }
    }
}