iota_util_mem/
malloc_size.rs

// Copyright (c) Mysten Labs, Inc.
// Modifications Copyright (c) 2024 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0

// Copyright 2016-2017 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! A crate for measuring the heap usage of data structures in a way that
//! integrates with Firefox's memory reporting, particularly the use of
//! mozjemalloc and DMD. In particular, it has the following features.
//! - It isn't bound to a particular heap allocator.
//! - It provides traits for both "shallow" and "deep" measurement, which gives
//!   flexibility in the cases where the traits can't be used.
//! - It allows for measuring blocks even when only an interior pointer can be
//!   obtained for heap allocations, e.g. `HashSet` and `HashMap`. (This relies
//!   on the heap allocator having suitable support, which mozjemalloc has.)
//! - It allows handling of types like `Rc` and `Arc` by providing traits that
//!   are different from the ones for non-graph structures.
//!
//! Suggested uses are as follows.
//! - When possible, use the `MallocSizeOf` trait. (Deriving support is provided
//!   by the `malloc_size_of_derive` crate.)
//! - If you need an additional synchronization argument, provide a function
//!   that is like the standard trait method, but with the extra argument.
//! - If you need multiple measurements for a type, provide a function named
//!   `add_size_of` that takes a mutable reference to a struct that contains the
//!   multiple measurement fields.
//! - When deep measurement (via `MallocSizeOf`) cannot be implemented for a
//!   type, shallow measurement (via `MallocShallowSizeOf`) in combination with
//!   iteration can be a useful substitute.
//! - `Rc` and `Arc` are always tricky, which is why `MallocSizeOf` is not (and
//!   should not be) implemented for them.
//! - If an `Rc` or `Arc` is known to be a "primary" reference and can always be
//!   measured, it should be measured via the `MallocUnconditionalSizeOf` trait.
//! - Using universal function call syntax is a good idea when measuring boxed
//!   fields in structs, because it makes it clear that the Box is being
//!   measured as well as the thing it points to. E.g. `<Box<_> as
//!   MallocSizeOf>::size_of(field, ops)`. See the sketch after this list.
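//!
//! A minimal sketch of the UFCS suggestion above (the `Entry` struct is
//! hypothetical, and the crate-root re-exports of `MallocSizeOf` and
//! `MallocSizeOfOps` are assumed):
//!
//! ```rust
//! use iota_util_mem::{MallocSizeOf, MallocSizeOfOps};
//!
//! struct Entry {
//!     payload: Box<Vec<u8>>,
//! }
//!
//! impl MallocSizeOf for Entry {
//!     fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
//!         // UFCS makes it explicit that the `Box` itself is measured,
//!         // not just the `Vec` it points to.
//!         <Box<Vec<u8>> as MallocSizeOf>::size_of(&self.payload, ops)
//!     }
//! }
//! ```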

//! This is an extended version of the Servo internal malloc_size crate.
//! We should occasionally track the upstream changes/fixes and reintroduce them
//! here, whenever applicable.

#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
#[cfg(feature = "std")]
mod rstd {
    pub use std::*;
}
#[cfg(not(feature = "std"))]
mod rstd {
    pub use core::*;
    pub mod collections {
        pub use alloc::collections::*;

        pub use vec_deque::VecDeque;
    }
}

#[cfg(not(feature = "std"))]
pub use alloc::boxed::Box;
#[cfg(not(feature = "std"))]
use core::ffi::c_void;
#[cfg(feature = "std")]
use std::hash::BuildHasher;
#[cfg(feature = "std")]
use std::os::raw::c_void;
#[cfg(feature = "std")]
use std::sync::Arc;

#[cfg(feature = "std")]
use rstd::hash::Hash;
use rstd::{
    marker::PhantomData,
    mem::size_of,
    ops::{Deref, DerefMut, Range},
};

/// A C function that takes a pointer to a heap allocation and returns its size.
pub type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;

/// A closure implementing a stateful predicate on pointers.
pub type VoidPtrToBoolFnMut = dyn FnMut(*const c_void) -> bool;

/// Operations used when measuring heap usage of data structures.
pub struct MallocSizeOfOps {
    /// A function that returns the size of a heap allocation.
    size_of_op: VoidPtrToSizeFn,

    /// Like `size_of_op`, but can take an interior pointer. Optional because
    /// not all allocators support this operation. If it's not provided, some
    /// memory measurements will actually be computed estimates rather than
    /// real and accurate measurements.
    enclosing_size_of_op: Option<VoidPtrToSizeFn>,

    /// Check if a pointer has been seen before, and remember it for next time.
    /// Useful when measuring `Rc`s and `Arc`s. Optional, because many places
    /// don't need it.
    have_seen_ptr_op: Option<Box<VoidPtrToBoolFnMut>>,
}

impl MallocSizeOfOps {
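    /// Construct measurement operations from the given callbacks. A minimal
    /// sketch of typical usage, assuming the crate-root re-export of
    /// `MallocSizeOfOps`; `stub_size_of` is a hypothetical stand-in for a real
    /// allocator callback (e.g. jemalloc's `malloc_usable_size`), and the
    /// `have_seen` closure is one common way to deduplicate `Rc`/`Arc`
    /// pointers:
    ///
    /// ```rust
    /// use std::{collections::HashSet, os::raw::c_void};
    ///
    /// use iota_util_mem::MallocSizeOfOps;
    ///
    /// unsafe extern "C" fn stub_size_of(_ptr: *const c_void) -> usize {
    ///     0 // a real callback asks the allocator for the block's usable size
    /// }
    ///
    /// let seen: HashSet<usize> = HashSet::new();
    /// // Returns true iff the pointer was recorded on an earlier call.
    /// let have_seen = Box::new(move |ptr: *const c_void| {
    ///     let mut seen = seen;
    ///     !seen.insert(ptr as usize)
    /// });
    ///
    /// let ops = MallocSizeOfOps::new(stub_size_of, None, Some(have_seen));
    /// ```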
    pub fn new(
        size_of: VoidPtrToSizeFn,
        malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
        have_seen_ptr: Option<Box<VoidPtrToBoolFnMut>>,
    ) -> Self {
        MallocSizeOfOps {
            size_of_op: size_of,
            enclosing_size_of_op: malloc_enclosing_size_of,
            have_seen_ptr_op: have_seen_ptr,
        }
    }

    /// Check if an allocation is empty. This relies on knowledge of how Rust
    /// handles empty allocations, which may change in the future.
    fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
        // The correct condition is this:
        //   `ptr as usize <= ::std::mem::align_of::<T>()`
        // But we can't call align_of() on a ?Sized T. So we approximate it
        // with the following. 256 is large enough that it should always be
        // larger than the required alignment, but small enough that it is
        // always in the first page of memory and therefore not a legitimate
        // address.
        return ptr as *const usize as usize <= 256;
    }

    /// Call `size_of_op` on `ptr`, first checking that the allocation isn't
    /// empty, because some types (such as `Vec`) utilize empty allocations.
    pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
        if MallocSizeOfOps::is_empty(ptr) {
            0
        } else {
            (self.size_of_op)(ptr as *const c_void)
        }
    }

    /// Is an `enclosing_size_of_op` available?
    pub fn has_malloc_enclosing_size_of(&self) -> bool {
        self.enclosing_size_of_op.is_some()
    }

    /// Call `enclosing_size_of_op`, which must be available, on `ptr`, which
    /// must not be empty.
    pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
        assert!(!MallocSizeOfOps::is_empty(ptr));
        (self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
    }

    /// Call `have_seen_ptr_op` on `ptr`.
    pub fn have_seen_ptr<T>(&mut self, ptr: *const T) -> bool {
        let have_seen_ptr_op = self
            .have_seen_ptr_op
            .as_mut()
            .expect("missing have_seen_ptr_op");
        have_seen_ptr_op(ptr as *const c_void)
    }
}

/// Trait for measuring the "deep" heap usage of a data structure. This is the
/// most commonly-used of the traits.
pub trait MallocSizeOf {
    /// Measure the heap usage of all descendant heap-allocated structures, but
    /// not the space taken up by the value itself.
    /// If `T::size_of` is a constant, consider implementing `constant_size` as
    /// well.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;

    /// Used to optimize `MallocSizeOf` implementations for collections
    /// like `Vec` and `HashMap` to avoid iterating over them unnecessarily.
    /// The `Self: Sized` bound is for object safety.
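    ///
    /// A minimal sketch, assuming the crate-root re-export of `MallocSizeOf`:
    /// types with no heap data report a constant deep size of zero, which lets
    /// `Vec<u8>` be measured as its shallow size plus `len() * 0` without
    /// iterating.
    ///
    /// ```rust
    /// use iota_util_mem::MallocSizeOf;
    ///
    /// assert_eq!(<u8 as MallocSizeOf>::constant_size(), Some(0));
    /// assert_eq!(<String as MallocSizeOf>::constant_size(), None); // size varies
    /// ```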
    fn constant_size() -> Option<usize>
    where
        Self: Sized,
    {
        None
    }
}

/// Trait for measuring the "shallow" heap usage of a container.
pub trait MallocShallowSizeOf {
    /// Measure the heap usage of immediate heap-allocated descendant
    /// structures, but not the space taken up by the value itself. Anything
    /// beyond the immediate descendants must be measured separately, using
    /// iteration.
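    ///
    /// A minimal sketch of the shallow-plus-iteration pattern, assuming the
    /// crate-root re-exports; this hypothetical free function mirrors how the
    /// deep `Vec<T>` implementation in this module is structured:
    ///
    /// ```rust
    /// use iota_util_mem::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
    ///
    /// fn deep_vec_size<T: MallocSizeOf>(v: &Vec<T>, ops: &mut MallocSizeOfOps) -> usize {
    ///     // The buffer itself, then each element's own heap data.
    ///     v.shallow_size_of(ops) + v.iter().map(|e| e.size_of(ops)).sum::<usize>()
    /// }
    /// ```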
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// Like `MallocSizeOf`, but with a different name so it cannot be used
/// accidentally with derive(MallocSizeOf). For use with types like `Rc` and
/// `Arc` when appropriate (e.g. when measuring a "primary" reference).
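///
/// A minimal sketch of measuring a "primary" `Arc` reference, assuming the
/// surrounding struct is the sole logical owner (the `Owner` type is
/// hypothetical, and the crate-root re-exports are assumed):
///
/// ```rust
/// use std::sync::Arc;
///
/// use iota_util_mem::{MallocSizeOf, MallocSizeOfOps, MallocUnconditionalSizeOf};
///
/// struct Owner {
///     shared: Arc<Vec<u8>>,
/// }
///
/// impl MallocSizeOf for Owner {
///     fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
///         // `Owner` holds the primary reference, so measure unconditionally.
///         self.shared.unconditional_size_of(ops)
///     }
/// }
/// ```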
pub trait MallocUnconditionalSizeOf {
    /// Measure the heap usage of all heap-allocated descendant structures, but
    /// not the space taken up by the value itself.
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

/// `MallocUnconditionalSizeOf` combined with `MallocShallowSizeOf`.
pub trait MallocUnconditionalShallowSizeOf {
    /// `unconditional_size_of` combined with `shallow_size_of`.
    fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

impl<'a, T: ?Sized> MallocSizeOf for &'a T {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // Zero makes sense for a non-owning reference.
        0
    }
    fn constant_size() -> Option<usize> {
        Some(0)
    }
}

impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.shallow_size_of(ops) + (**self).size_of(ops)
    }
}

#[impl_trait_for_tuples::impl_for_tuples(12)]
impl MallocSizeOf for Tuple {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut result = 0;
        for_tuples!( #( result += Tuple.size_of(ops); )* );
        result
    }
    fn constant_size() -> Option<usize> {
        let mut result = Some(0);
        for_tuples!( #( result = result.and_then(|s| Tuple::constant_size().map(|t| s + t)); )* );
        result
    }
}

impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if let Some(val) = self.as_ref() {
            val.size_of(ops)
        } else {
            0
        }
    }
    fn constant_size() -> Option<usize> {
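        // A `None` contributes nothing, so the deep size is only constant when
        // `T`'s constant size is 0 (otherwise `Some` and `None` would differ).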
        T::constant_size().filter(|s| *s == 0)
    }
}

impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match *self {
            Ok(ref x) => x.size_of(ops),
            Err(ref e) => e.size_of(ops),
        }
    }
    fn constant_size() -> Option<usize> {
        // Result<T, E> has constant size iff T::constant_size == E::constant_size
        T::constant_size().and_then(|t| E::constant_size().filter(|e| *e == t))
    }
}

impl<T: MallocSizeOf + Copy> MallocSizeOf for rstd::cell::Cell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.get().size_of(ops)
    }
    fn constant_size() -> Option<usize> {
        T::constant_size()
    }
}

impl<T: MallocSizeOf> MallocSizeOf for rstd::cell::RefCell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.borrow().size_of(ops)
    }
    fn constant_size() -> Option<usize> {
        T::constant_size()
    }
}

#[cfg(feature = "std")]
impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
where
    B::Owned: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match *self {
            std::borrow::Cow::Borrowed(_) => 0,
            std::borrow::Cow::Owned(ref b) => b.size_of(ops),
        }
    }
}

impl<T: MallocSizeOf> MallocSizeOf for [T] {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = 0;
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

impl<T> MallocShallowSizeOf for rstd::collections::VecDeque<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            if let Some(front) = self.front() {
                // The front element is an interior pointer.
                unsafe { ops.malloc_enclosing_size_of(&*front) }
            } else {
                // This assumes that no memory is allocated when the VecDeque is empty.
                0
            }
        } else {
            // An estimate.
            self.capacity() * size_of::<T>()
        }
    }
}

impl<T: MallocSizeOf> MallocSizeOf for rstd::collections::VecDeque<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

#[cfg(feature = "std")]
impl<T, S> MallocShallowSizeOf for std::collections::HashSet<T, S>
where
    T: Eq + Hash,
    S: BuildHasher,
{
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            // The first value from the iterator gives us an interior pointer.
            // `ops.malloc_enclosing_size_of()` then gives us the storage size.
            // This assumes that the `HashSet`'s contents (values and hashes)
            // are all stored in a single contiguous heap allocation.
            self.iter()
                .next()
                .map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
        } else {
            // An estimate.
            self.capacity() * (size_of::<T>() + size_of::<usize>())
        }
    }
}

#[cfg(feature = "std")]
impl<T, S> MallocSizeOf for std::collections::HashSet<T, S>
where
    T: Eq + Hash + MallocSizeOf,
    S: BuildHasher,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

impl<I: MallocSizeOf> MallocSizeOf for rstd::cmp::Reverse<I> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops)
    }
    fn constant_size() -> Option<usize> {
        I::constant_size()
    }
}

#[cfg(feature = "std")]
impl<K, V, S> MallocShallowSizeOf for std::collections::HashMap<K, V, S> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // See the implementation for std::collections::HashSet for details.
        if ops.has_malloc_enclosing_size_of() {
            self.values()
                .next()
                .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
        } else {
            self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
        }
    }
}

#[cfg(feature = "std")]
impl<K, V, S> MallocSizeOf for std::collections::HashMap<K, V, S>
where
    K: MallocSizeOf,
    V: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
            n += self.len() * (k + v)
        } else {
            n = self
                .iter()
                .fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
        }
        n
    }
}

impl<K, V> MallocShallowSizeOf for rstd::collections::BTreeMap<K, V> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            self.values()
                .next()
                .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
        } else {
            self.len() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
        }
    }
}

impl<K, V> MallocSizeOf for rstd::collections::BTreeMap<K, V>
where
    K: MallocSizeOf,
    V: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
            n += self.len() * (k + v)
        } else {
            n = self
                .iter()
                .fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
        }
        n
    }
}

impl<T> MallocShallowSizeOf for rstd::collections::BTreeSet<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if ops.has_malloc_enclosing_size_of() {
            // See the `HashSet` implementation for how this works.
            self.iter()
                .next()
                .map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
        } else {
            // An estimate.
            self.len() * (size_of::<T>() + size_of::<usize>())
        }
    }
}

impl<T> MallocSizeOf for rstd::collections::BTreeSet<T>
where
    T: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let Some(t) = T::constant_size() {
            n += self.len() * t;
        } else {
            n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
        }
        n
    }
}

// XXX: we don't want MallocSizeOf to be defined for Rc and Arc. If negative
// trait bounds are ever allowed, this code should be uncommented.
// (We do have a compile-fail test for this:
// rc_arc_must_not_derive_malloc_size_of.rs)
// impl<T> !MallocSizeOf for Arc<T> { }
// impl<T> !MallocShallowSizeOf for Arc<T> { }

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocUnconditionalSizeOf for Arc<T> {
    fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.unconditional_shallow_size_of(ops) + (**self).size_of(ops)
    }
}

/// If a mutex is stored directly as a member of a data type that is being
/// measured, it is the unique owner of its contents and deserves to be
/// measured.
///
/// If a mutex is stored inside an Arc value as a member of a data type that
/// is being measured, the Arc will not be automatically measured, so there is
/// no risk of overcounting the mutex's contents.
///
/// The same reasoning applies to RwLock.
#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for std::sync::Mutex<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.lock().unwrap().size_of(ops)
    }
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::Mutex<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.lock().size_of(ops)
    }
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for once_cell::sync::OnceCell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.get().map(|v| v.size_of(ops)).unwrap_or(0)
    }
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for std::sync::RwLock<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.read().unwrap().size_of(ops)
    }
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::RwLock<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.read().size_of(ops)
    }
}

/// Implement the notion of zero allocation size for some type(s).
///
/// If used with generic types, by default the generic arguments are required
/// to implement `MallocSizeOf`. This requirement can be avoided by passing
/// "any: " in front of the type list.
///
/// ```rust
/// use iota_util_mem::{malloc_size, malloc_size_of_is_0};
///
/// struct Data<P> {
/// 	phantom: std::marker::PhantomData<P>,
/// }
///
/// malloc_size_of_is_0!(any: Data<P>);
///
/// // MallocSizeOf is NOT implemented for [u8; 333]
/// assert_eq!(malloc_size(&Data::<[u8; 333]> { phantom: std::marker::PhantomData }), 0);
/// ```
///
/// And without "any: ":
///
/// ```rust
/// use iota_util_mem::{malloc_size, malloc_size_of_is_0};
///
/// struct Data<T>(pub T);
///
/// // The generic argument (`T`) must be `impl MallocSizeOf`.
/// malloc_size_of_is_0!(Data<u8>);
///
/// assert_eq!(malloc_size(&Data(0u8)), 0);
/// ```
#[macro_export]
macro_rules! malloc_size_of_is_0(
	($($ty:ty),+) => (
		$(
			impl $crate::MallocSizeOf for $ty {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
	(any: $($ty:ident<$($gen:ident),+>),+) => (
		$(
			impl<$($gen),+> $crate::MallocSizeOf for $ty<$($gen),+> {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
	($($ty:ident<$($gen:ident),+>),+) => (
		$(
			impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
);

malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);

malloc_size_of_is_0!(rstd::sync::atomic::AtomicBool);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicUsize);

malloc_size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
malloc_size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
malloc_size_of_is_0!(Range<f32>, Range<f64>);
malloc_size_of_is_0!(any: PhantomData<T>);

/// A wrapper that defers to its inner value; useful for verifying the
/// `MallocSizeOf` implementation of a field inside a struct.
#[derive(Clone)]
pub struct Measurable<T: MallocSizeOf>(pub T);

impl<T: MallocSizeOf> Deref for Measurable<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.0
    }
}

impl<T: MallocSizeOf> DerefMut for Measurable<T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}

#[cfg(feature = "hashbrown")]
impl<K, V, S> MallocShallowSizeOf for hashbrown::HashMap<K, V, S> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // See the implementation for std::collections::HashSet for details.
        if ops.has_malloc_enclosing_size_of() {
            self.values()
                .next()
                .map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
        } else {
            self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
        }
    }
}

#[cfg(feature = "hashbrown")]
impl<K, V, S> MallocSizeOf for hashbrown::HashMap<K, V, S>
where
    K: MallocSizeOf,
    V: MallocSizeOf,
{
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut n = self.shallow_size_of(ops);
        if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
            n += self.len() * (k + v)
        } else {
            n = self
                .iter()
                .fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
        }
        n
    }
}

malloc_size_of_is_0!(
    [u8; 1], [u8; 2], [u8; 3], [u8; 4], [u8; 5], [u8; 6], [u8; 7], [u8; 8], [u8; 9], [u8; 10],
    [u8; 11], [u8; 12], [u8; 13], [u8; 14], [u8; 15], [u8; 16], [u8; 17], [u8; 18], [u8; 19],
    [u8; 20], [u8; 21], [u8; 22], [u8; 23], [u8; 24], [u8; 25], [u8; 26], [u8; 27], [u8; 28],
    [u8; 29], [u8; 30], [u8; 31], [u8; 32]
);

macro_rules! impl_smallvec {
    ($size: expr) => {
        #[cfg(feature = "smallvec")]
        impl<T> MallocSizeOf for smallvec::SmallVec<[T; $size]>
        where
            T: MallocSizeOf,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
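                // The heap buffer exists only once the `SmallVec` has
                // spilled; inline storage contributes nothing.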
                let mut n = if self.spilled() {
                    self.capacity() * core::mem::size_of::<T>()
                } else {
                    0
                };
                if let Some(t) = T::constant_size() {
                    n += self.len() * t;
                } else {
                    n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
                }
                n
            }
        }
    };
}

impl_smallvec!(32); // kvdb uses this
impl_smallvec!(36); // trie-db uses this

#[cfg(feature = "std")]
malloc_size_of_is_0!(std::time::Instant);
#[cfg(feature = "std")]
malloc_size_of_is_0!(std::time::Duration);

#[cfg(all(test, feature = "std"))] // tests are using std implementations
mod tests {
    use std::{collections::BTreeSet, mem};

    use smallvec::SmallVec;

    use crate::{MallocSizeOf, MallocSizeOfOps, allocators::new_malloc_size_ops};
    impl_smallvec!(3);

    #[test]
    fn test_smallvec_stack_allocated_type() {
        let mut v: SmallVec<[u8; 3]> = SmallVec::new();
        let mut ops = new_malloc_size_ops();
        assert_eq!(v.size_of(&mut ops), 0);
        v.push(1);
        v.push(2);
        v.push(3);
        assert_eq!(v.size_of(&mut ops), 0);
        assert!(!v.spilled());
        v.push(4);
        assert!(
            v.spilled(),
            "SmallVec spills when going beyond the capacity of the inner backing array"
        );
        assert_eq!(v.size_of(&mut ops), 4); // 4 u8s on the heap
    }

    #[test]
    fn test_smallvec_boxed_stack_allocated_type() {
        let mut v: SmallVec<[Box<u8>; 3]> = SmallVec::new();
        let mut ops = new_malloc_size_ops();
        assert_eq!(v.size_of(&mut ops), 0);
        v.push(Box::new(1u8));
        v.push(Box::new(2u8));
        v.push(Box::new(3u8));
        assert!(v.size_of(&mut ops) >= 3);
        assert!(!v.spilled());
        v.push(Box::new(4u8));
        assert!(
            v.spilled(),
            "SmallVec spills when going beyond the capacity of the inner backing array"
        );
        let mut ops = new_malloc_size_ops();
        let expected_min_allocs = mem::size_of::<Box<u8>>() * 4 + 4;
        assert!(v.size_of(&mut ops) >= expected_min_allocs);
    }

    #[test]
    fn test_smallvec_heap_allocated_type() {
        let mut v: SmallVec<[String; 3]> = SmallVec::new();
        let mut ops = new_malloc_size_ops();
        assert_eq!(v.size_of(&mut ops), 0);
        v.push("COW".into());
        v.push("PIG".into());
        v.push("DUCK".into());
        assert!(!v.spilled());
        assert!(v.size_of(&mut ops) >= "COW".len() + "PIG".len() + "DUCK".len());
        v.push("ÖWL".into());
        assert!(v.spilled());
        let mut ops = new_malloc_size_ops();
        let expected_min_allocs =
            mem::size_of::<String>() * 4 + "ÖWL".len() + "COW".len() + "PIG".len() + "DUCK".len();
        assert!(v.size_of(&mut ops) >= expected_min_allocs);
    }

    #[test]
    fn test_large_vec() {
        const N: usize = 128 * 1024 * 1024;
        let val = vec![1u8; N];
        let mut ops = new_malloc_size_ops();
        assert!(val.size_of(&mut ops) >= N);
        assert!(val.size_of(&mut ops) < 2 * N);
    }

    #[test]
    fn btree_set() {
        let mut set = BTreeSet::new();
        for t in 0..100 {
            set.insert(vec![t]);
        }
        // ~36 per value
        assert!(crate::malloc_size(&set) > 3000);
    }

    #[test]
    fn special_malloc_size_of_0() {
        struct Data<P> {
            phantom: std::marker::PhantomData<P>,
        }

        malloc_size_of_is_0!(any: Data<P>);

        // MallocSizeOf is not implemented for [u8; 333]
        assert_eq!(
            crate::malloc_size(&Data::<[u8; 333]> {
                phantom: std::marker::PhantomData
            }),
            0
        );
    }

    #[test]
    fn constant_size() {
        struct AlwaysTwo(Vec<u8>);

        impl MallocSizeOf for AlwaysTwo {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                self.0.size_of(ops)
            }
            fn constant_size() -> Option<usize> {
                Some(2)
            }
        }

        assert_eq!(AlwaysTwo::constant_size(), Some(2));
        assert_eq!(std::cmp::Reverse::<u8>::constant_size(), Some(0));
        assert_eq!(std::cell::RefCell::<u8>::constant_size(), Some(0));
        assert_eq!(std::cell::Cell::<u8>::constant_size(), Some(0));
        assert_eq!(Result::<(), ()>::constant_size(), Some(0));
        assert_eq!(
            <(AlwaysTwo, (), [u8; 32], AlwaysTwo)>::constant_size(),
            Some(2 + 2)
        );
        assert_eq!(Option::<u8>::constant_size(), Some(0));
        assert_eq!(<&String>::constant_size(), Some(0));

        assert_eq!(<String>::constant_size(), None);
        assert_eq!(std::borrow::Cow::<String>::constant_size(), None);
        assert_eq!(Result::<(), String>::constant_size(), None);
        assert_eq!(Option::<AlwaysTwo>::constant_size(), None);
    }
}
856}